lib.itmens/common/setup.py

from django.conf import settings
from django.core.checks import Error, Warning
from loguru import logger

from catalog.search.models import Indexer
from common.models import JobManager
from takahe.models import Config as TakaheConfig
from takahe.models import Domain as TakaheDomain
from takahe.models import Identity as TakaheIdentity
from takahe.models import Relay as TakaheRelay
from takahe.models import User as TakaheUser
from takahe.utils import Takahe
from users.models import User


class Setup:
    """
    Post-Migration Setup
    """

    def create_site(self, domain, service_domain):
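        """Create the local Takahe Domain record and disable Takahe's public timeline config."""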
        TakaheDomain.objects.update_or_create(
            domain=domain,
            defaults={
                "local": True,
                "service_domain": service_domain,
                "notes": "NeoDB",
                "nodeinfo": {},
                "state": "updated",
            },
        )
        TakaheConfig.objects.update_or_create(
            key="public_timeline",
            user=None,
            identity=None,
            domain=None,
            defaults={"json": False},
        )

    def sync_site_config(self):
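        """Ensure the local Takahe Domain exists and push site name/icon into Takahe config."""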
        domain = settings.SITE_INFO["site_domain"]
        if not domain:
            raise ValueError("Panic: site_domain is not set!")
        icon = settings.SITE_INFO["site_logo"]
        name = settings.SITE_INFO["site_name"]
        service_domain = settings.SITE_INFO.get("site_service_domain")
        if not TakaheDomain.objects.filter(domain=domain).exists():
            logger.info(f"Domain {domain} not found, creating...")
            self.create_site(domain, service_domain)
            if (
                TakaheIdentity.objects.filter(local=True)
                .exclude(domain_id__isnull=True)
                .exists()
            ):
                logger.warning(
                    "Local identities are found for other domains, there might be a configuration issue."
                )
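
        # site_name and site_icon are written both globally (domain None) and for the local domain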
        TakaheConfig.objects.update_or_create(
            key="site_name",
            user=None,
            identity=None,
            domain=None,
            defaults={"json": name},
        )
        TakaheConfig.objects.update_or_create(
            key="site_name",
            user=None,
            identity=None,
            domain_id=domain,
            defaults={"json": name},
        )
        TakaheConfig.objects.update_or_create(
            key="site_icon",
            user=None,
            identity=None,
            domain_id=None,
            defaults={"json": icon},
        )
        TakaheConfig.objects.update_or_create(
            key="site_icon",
            user=None,
            identity=None,
            domain_id=domain,
            defaults={"json": icon},
        )

    def sync_relay(self):
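        """Subscribe to or unsubscribe from the default relay, depending on settings."""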
        relay = TakaheRelay.objects.filter(
            state__in=["new", "subscribing", "subscribed"],
            inbox_uri=settings.DEFAULT_RELAY_SERVER,
        ).first()
        if settings.DISABLE_DEFAULT_RELAY:
            if relay:
                logger.info("Default relay is disabled, unsubscribing...")
                Takahe.update_state(relay, "unsubscribing")
            else:
                logger.info("Default relay is disabled.")
        else:
            if relay:
                logger.debug(f"Default relay is enabled, state: {relay.state}")
            else:
                logger.info("Default relay is enabled, subscribing...")
                TakaheRelay.objects.update_or_create(
                    inbox_uri=settings.DEFAULT_RELAY_SERVER,
                    defaults={"state": "new"},
                )

    def run(self):
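        """Run post-migration setup steps; only the minimal subset when testing."""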
        if settings.TESTING:
            # Only do necessary initialization when testing
            logger.info("Running minimal post-migration setup for testing...")
            self.sync_site_config()
            Indexer.init()
            return

        logger.info("Running post-migration setup...")
        # Update site name if changed
        self.sync_site_config()
        # Subscribe to default relay if enabled
        self.sync_relay()
        # Create basic emoji if not exists
        # Create search index if not exists
        Indexer.init()
        # Register cron jobs if not yet
        if settings.DISABLE_CRON_JOBS and "*" in settings.DISABLE_CRON_JOBS:
            logger.info("Cron jobs are disabled.")
            JobManager.cancel_all()
        else:
            JobManager.reschedule_all()
        logger.info("Finished post-migration setup.")

    def check(self):
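        """Return django.core.checks errors/warnings for env, redis, search index and takahe database."""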
        from redis import Redis

        errors = []
        # check env
        domain = settings.SITE_INFO.get("site_domain")
        if not domain:
            errors.append(
                Error(
                    "SITE DOMAIN is not specified",
                    hint="Check NEODB_SITE_DOMAIN in .env",
                    id="neodb.E001",
                )
            )
        # check redis
        try:
            redis = Redis.from_url(settings.REDIS_URL)
            if not redis:
                raise Exception("Redis unavailable")
            redis.ping()
        except Exception as e:
            errors.append(
                Error(
                    f"Error while connecting to redis: {e}",
                    hint="Check NEODB_REDIS_URL/TAKAHE_CACHES_DEFAULT in .env",
                    id="neodb.E002",
                )
            )
        # check indexer
        try:
            Indexer.check()
        except Exception as e:
            errors.append(
                Error(
                    f"Error while connecting to search index server: {e}",
                    hint='Check NEODB_SEARCH_URL in .env, and run "neodb-manage migration"',
                    id="neodb.E003",
                )
            )
        # check takahe
        try:
            if not TakaheDomain.objects.filter(domain=domain).exists():
                errors.append(
                    Warning(
                        f"Domain {domain} not found in takahe database",
                        hint="Run migration once to create the domain",
                        id="neodb.W001",
                    )
                )
        except Exception as e:
            errors.append(
                Error(
                    f"Error while querying takahe database: {e}",
                    hint='Check TAKAHE_DB_URL/TAKAHE_DATABASE_SERVER in .env, and run "takahe-manage migration"',
                    id="neodb.E004",
                )
            )
        return errors
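

# Illustrative usage sketch (assumption: this file does not show how Setup is wired in).
# check() returns django.core.checks messages, so it could be exposed through Django's
# system check framework, and run() would typically be invoked once after migrations:
#
#   from django.core.checks import register
#
#   @register()
#   def neodb_setup_check(app_configs, **kwargs):
#       return Setup().check()   # surfaces neodb.E00x / neodb.W001 via `manage.py check`
#
#   # e.g. from a post-migrate hook or management command:
#   Setup().run()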