more search commands
commit 73a9f023a1
parent 06c3ba6f3b

3 changed files with 66 additions and 10 deletions
common/index.py

@@ -1,11 +1,9 @@
+import logging
 import meilisearch
 from django.conf import settings
 from django.db.models.signals import post_save, post_delete
 
 
-# TODO
-# use post_save, post_delete
-# search result translate back to model
 INDEX_NAME = 'items'
 INDEX_SEARCHABLE_ATTRIBUTES = ['title', 'orig_title', 'other_title', 'subtitle', 'artist', 'author', 'translator', 'developer', 'director', 'actor', 'playwright', 'brief', 'contents', 'track_list', 'pub_house', 'company', 'publisher', 'isbn', 'imdb_code', 'UPC', 'TMDB_ID', 'BANDCAMP_ALBUM_ID']
 INDEXABLE_DIRECT_TYPES = ['BigAutoField', 'BooleanField', 'CharField', 'PositiveIntegerField', 'PositiveSmallIntegerField', 'TextField', 'ArrayField']
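The hunks below call self.obj_to_dict(obj), which this diff never shows. A hedged sketch of what such a serializer could look like, driven by the whitelists above; only the method name, the type lists (INDEXABLE_FLOAT_TYPES appears in the next hunk header), and the '{Class}-{id}' key format are taken from this commit, while the rest, including the 'id' field name, is assumed:

    # Hypothetical sketch only -- not the project's actual implementation.
    def obj_to_dict(obj):
        # The '{Class}-{id}' format matches the pk built in delete_item below;
        # storing it under 'id' is an assumption.
        item = {'id': f'{obj.__class__.__name__}-{obj.id}'}
        for field in obj.__class__._meta.get_fields():
            # Reverse relations have no get_internal_type(); skip them.
            internal = getattr(field, 'get_internal_type', lambda: None)()
            if internal in INDEXABLE_DIRECT_TYPES:
                item[field.name] = getattr(obj, field.name, None)
            elif internal in INDEXABLE_FLOAT_TYPES:
                value = getattr(obj, field.name, None)
                item[field.name] = float(value) if value is not None else None
        return item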
@@ -16,6 +14,9 @@ INDEXABLE_FLOAT_TYPES = ['DecimalField']
 SEARCH_PAGE_SIZE = 20
 
 
+logger = logging.getLogger(__name__)
+
+
 def item_post_save_handler(sender, instance, **kwargs):
     Indexer.replace_item(instance)
 
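The post_save/post_delete imports and the handler above imply the signals are connected somewhere outside this diff. A minimal sketch of how such wiring typically looks in a Django AppConfig.ready(); everything except item_post_save_handler is illustrative:

    # Hypothetical wiring; this commit does not show where signals are connected.
    from django.apps import AppConfig
    from django.db.models.signals import post_save


    class CommonConfig(AppConfig):  # assumed config class for the `common` app
        name = 'common'

        def ready(self):
            from common.index import item_post_save_handler
            from movies.models import Movie  # one indexed model, for illustration
            # dispatch_uid prevents duplicate registration if ready() runs twice.
            post_save.connect(item_post_save_handler, sender=Movie,
                              dispatch_uid='search-index-movie-save')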
@@ -98,12 +99,18 @@ class Indexer:
 
     @classmethod
     def replace_item(self, obj):
-        self.instance().add_documents([self.obj_to_dict(obj)])
+        try:
+            self.instance().add_documents([self.obj_to_dict(obj)])
+        except Exception as e:
+            logger.error(f"replace item error: \n{e}")
 
     @classmethod
     def delete_item(self, obj):
         pk = f'{obj.__class__.__name__}-{obj.id}'
-        self.instance().delete_document(pk)
+        try:
+            self.instance().delete_document(pk)
+        except Exception as e:
+            logger.error(f"delete item error: \n{e}")
 
     @classmethod
     def patch_item(self, obj, fields):
@@ -111,7 +118,10 @@ class Indexer:
         data = {}
         for f in fields:
             data[f] = getattr(obj, f)
-        self.instance().update_documents(documents=[data], primary_key=[pk])
+        try:
+            self.instance().update_documents(documents=[data], primary_key=[pk])
+        except Exception as e:
+            logger.error(f"patch item error: \n{e}")
 
     @classmethod
     def search(self, q, page=1, category=None, tag=None, sort=None):
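With the try/except wrappers above, a failed index call is logged rather than raised, so call sites can treat indexing as fire-and-forget. An illustrative call site; Movie and its field are examples, not code from this repo:

    # Illustrative only; Movie is just one of the indexed model classes.
    from common.index import Indexer
    from movies.models import Movie

    movie = Movie.objects.get(pk=1)
    Indexer.replace_item(movie)            # reindex the whole document
    Indexer.patch_item(movie, ['title'])   # push only the listed fields
    Indexer.delete_item(movie)             # delete by the 'Movie-1' composite key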
common/management/commands/index_stats.py (new file, 43 additions)

@@ -0,0 +1,43 @@
+from django.core.management.base import BaseCommand
+from common.index import Indexer, INDEX_NAME
+from django.conf import settings
+from movies.models import Movie
+from books.models import Book
+from games.models import Game
+from music.models import Album, Song
+from django.core.paginator import Paginator
+from tqdm import tqdm
+from time import sleep
+from datetime import timedelta
+from django.utils import timezone
+
+
+BATCH_SIZE = 10000
+
+
+class Command(BaseCommand):
+    help = 'Check search index'
+
+    def handle(self, *args, **options):
+        print(f'Connecting to search server {settings.MEILISEARCH_SERVER} for index: {INDEX_NAME}')
+        stats = Indexer.get_stats()
+        print(stats)
+        st = Indexer.instance().get_all_update_status()
+        cnt = {"enqueued": [0, 0], "processing": [0, 0], "processed": [0, 0]}
+        lastEnq = {"enqueuedAt": ""}
+        lastProc = {"enqueuedAt": ""}
+        for s in st:
+            n = s["type"].get("number")
+            cnt[s["status"]][0] += 1
+            cnt[s["status"]][1] += n if n else 0
+            if s["status"] == "processing":
+                print(s)
+            elif s["status"] == "enqueued":
+                if s["enqueuedAt"] > lastEnq["enqueuedAt"]:
+                    lastEnq = s
+            elif s["status"] == "processed":
+                if s["enqueuedAt"] > lastProc["enqueuedAt"]:
+                    lastProc = s
+        print(lastEnq)
+        print(lastProc)
+        print(cnt)
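Django management commands are named after their module, so this new file can be invoked as `python manage.py index_stats`, or programmatically via call_command:

    # Equivalent to running `python manage.py index_stats` from the project root.
    from django.core.management import call_command

    call_command('index_stats')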
(third changed file: the batch re-indexing management command; its filename is not shown in this capture)

@@ -12,7 +12,7 @@ from datetime import timedelta
 from django.utils import timezone
 
 
-BATCH_SIZE = 10000
+BATCH_SIZE = 1000
 
 
 class Command(BaseCommand):
@@ -24,9 +24,11 @@ class Command(BaseCommand):
     def handle(self, *args, **options):
         h = int(options['hours'])
         print(f'Connecting to search server {settings.MEILISEARCH_SERVER} for index: {INDEX_NAME}')
-        Indexer.update_settings()
-        self.stdout.write(self.style.SUCCESS('Index settings updated.'))
-        for c in [Game, Movie, Book, Album, Song]:
+        if Indexer.get_stats()['isIndexing']:
+            print('Please wait for previous updates')
+        # Indexer.update_settings()
+        # self.stdout.write(self.style.SUCCESS('Index settings updated.'))
+        for c in [Book, Song, Album, Game, Movie]:
             print(f'Re-indexing {c}')
             qs = c.objects.all() if h == 0 else c.objects.filter(edited_time__gt=timezone.now() - timedelta(hours=h))
             pg = Paginator(qs.order_by('id'), BATCH_SIZE)
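Indexer.get_stats()['isIndexing'] gates the loop above and below, but the helper itself is outside this diff. Assuming it is a thin wrapper over the MeiliSearch index stats call, whose payload in the Python client versions of this era is a dict including an 'isIndexing' flag, a plausible shape is:

    # Assumed wrapper; the real method is defined elsewhere in common/index.py.
    @classmethod
    def get_stats(cls):
        # meilisearch's Index.get_stats() returns a dict such as
        # {'numberOfDocuments': ..., 'isIndexing': ..., 'fieldsDistribution': {...}}
        return cls.instance().get_stats()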
@@ -36,3 +38,4 @@ class Command(BaseCommand):
             Indexer.instance().update_documents(documents=items)
             while Indexer.get_stats()['isIndexing']:
                 sleep(0.5)
+
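The while/sleep shown as context here makes bulk indexing self-throttling: each batch waits until MeiliSearch drains its queue before the next one is sent. The paging part of handle() is elided from this diff; a self-contained sketch of the overall pattern, with every name below assumed rather than taken from the repo:

    # Illustrative pattern, not the repository's actual loop.
    from time import sleep

    from django.core.paginator import Paginator
    from tqdm import tqdm


    def reindex(queryset, indexer, batch_size=1000):
        pages = Paginator(queryset.order_by('id'), batch_size)
        for page_number in tqdm(pages.page_range):
            items = [indexer.obj_to_dict(obj) for obj in pages.page(page_number).object_list]
            indexer.instance().update_documents(documents=items)
            # Wait for the search server to finish before sending the next batch.
            while indexer.get_stats()['isIndexing']:
                sleep(0.5)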