Merge pull request #9 from doubaniux/minor-ui-improvements

Minor UI improvements & other fixes
Henri Dickson 2022-12-19 09:00:10 -05:00 committed by GitHub
commit e8b0b85f37
4 changed files with 28 additions and 4 deletions

.gitignore (vendored), 3 lines changed

@@ -28,7 +28,8 @@ migrations/
 /log
 log
-# conf folder for neodb
+# conf folders
 /conf
+/neodb
 # typesense folder


@@ -14,6 +14,7 @@
 display: none
 position: fixed
 width: 500px
+max-width: 100vw
 top: 50%
 left: 50%
 transform: translate(-50%, -50%)


@@ -54,6 +54,22 @@
 {% if request.GET.q and user.is_authenticated %}
 <li class="entity-list__entity" hx-get="{% url 'common:external_search' %}?q={{ request.GET.q }}&c={{ request.GET.c }}&page={% if pagination.current_page %}{{ pagination.current_page }}{% else %}1{% endif %}" hx-trigger="load" hx-swap="outerHTML">
     {% trans '正在实时搜索站外条目' %}
+    <div id="spinner">
+        <div class="spinner">
+            <div></div>
+            <div></div>
+            <div></div>
+            <div></div>
+            <div></div>
+            <div></div>
+            <div></div>
+            <div></div>
+            <div></div>
+            <div></div>
+            <div></div>
+            <div></div>
+        </div>
+    </div>
 </li>
 {% endif %}
 </ul>
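The block added here is a pure-CSS loading spinner shown while htmx lazy-loads external search results: hx-trigger="load" fires the GET as soon as the element renders, and hx-swap="outerHTML" replaces the whole <li>, spinner included, with the returned fragment. A minimal sketch of the server side of this pattern, assuming a hypothetical view and template name (the project's actual common:external_search view is not part of this diff):

```python
# Hypothetical sketch of the htmx fragment endpoint this template calls;
# perform_external_search and the partial template path are illustrative
# names, not the project's actual code.
from django.shortcuts import render


def perform_external_search(q, category, page):
    # Placeholder for the site-specific external lookups; returns a list of items.
    return []


def external_search(request):
    q = request.GET.get("q", "")
    c = request.GET.get("c", "")
    page = int(request.GET.get("page", "1") or 1)
    items = perform_external_search(q, category=c, page=page)
    # Render only an HTML fragment; htmx swaps it in place of the spinner <li>
    # because the triggering element uses hx-swap="outerHTML".
    return render(request, "partials/external_search_results.html", {"items": items})
```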


@@ -100,8 +100,12 @@ class DoubanPatcherMixin:
     get(url, 60)
 else:
     error = error + '\nScraperAPI: '
-    # get(f'http://api.scraperapi.com?api_key={settings.SCRAPERAPI_KEY}&url={url}', 60)
-    get(f'http://api.scrapestack.com/scrape?access_key={settings.SCRAPESTACK_KEY}&url={url}', 60)
+    if settings.SCRAPESTACK_KEY is not None:
+        dl_url = f'http://api.scrapestack.com/scrape?access_key={settings.SCRAPESTACK_KEY}&url={url}'
+    elif settings.SCRAPERAPI_KEY is not None:
+        dl_url = f'http://api.scraperapi.com?api_key={settings.SCRAPERAPI_KEY}&url={url}'
+    get(dl_url, 60)
 check_content()
 wayback_cdx()
@@ -121,9 +125,11 @@ class DoubanPatcherMixin:
 ext = None
 dl_url = url
 if settings.SCRAPESTACK_KEY is not None:
     dl_url = f'http://api.scrapestack.com/scrape?access_key={settings.SCRAPESTACK_KEY}&url={url}'
     # f'http://api.scraperapi.com?api_key={settings.SCRAPERAPI_KEY}&url={url}'
+elif settings.SCRAPERAPI_KEY is not None:
+    dl_url = f'http://api.scraperapi.com?api_key={settings.SCRAPERAPI_KEY}&url={url}'
 try:
     img_response = requests.get(dl_url, timeout=90)
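
Both hunks apply the same selection rule: use ScrapeStack when SCRAPESTACK_KEY is configured, otherwise fall back to ScraperAPI, and only then issue the request. A standalone sketch of that rule, using a hypothetical build_proxy_url helper (the commit inlines the if/elif directly in DoubanPatcherMixin):

```python
# Sketch of the proxy-selection logic introduced here; build_proxy_url is a
# hypothetical helper name, not part of the actual codebase.
from typing import Optional


def build_proxy_url(url: str,
                    scrapestack_key: Optional[str] = None,
                    scraperapi_key: Optional[str] = None) -> str:
    """Wrap url with a scraping proxy, preferring ScrapeStack over ScraperAPI."""
    if scrapestack_key is not None:
        return f'http://api.scrapestack.com/scrape?access_key={scrapestack_key}&url={url}'
    elif scraperapi_key is not None:
        return f'http://api.scraperapi.com?api_key={scraperapi_key}&url={url}'
    # With neither key configured, fetch the original URL directly.
    return url
```

The final return mirrors the dl_url = url default that the second hunk sets before the if/elif.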