import django
from django.conf import settings
from django.core.checks import Error, Warning
from loguru import logger

from catalog.search.models import Indexer
from common.models import JobManager
from takahe.models import Config as TakaheConfig
from takahe.models import Domain as TakaheDomain
from takahe.models import Follow as TakaheFollow
from takahe.models import Identity as TakaheIdentity
from takahe.models import Relay as TakaheRelay
from takahe.models import User as TakaheUser
from takahe.utils import Takahe
from users.models import User


class Setup:
    """
    Post-Migration Setup
    """

    def create_site(self, domain, service_domain):
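        """Register the local Takahe domain and turn off the public timeline by default."""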
        TakaheDomain.objects.update_or_create(
            domain=domain,
            defaults={
                "local": True,
                "service_domain": service_domain,
                "notes": "NeoDB",
                "nodeinfo": {},
                "state": "updated",
            },
        )
        TakaheConfig.objects.update_or_create(
            key="public_timeline",
            user=None,
            identity=None,
            domain=None,
            defaults={"json": False},
        )

    def sync_site_config(self):
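        """Sync site domain, name and icon from settings.SITE_INFO into Takahe config."""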
        domain = settings.SITE_INFO["site_domain"]
        if not domain:
            raise ValueError("Panic: site_domain is not set!")
        icon = settings.SITE_INFO["site_logo"]
        name = settings.SITE_INFO["site_name"]
        service_domain = settings.SITE_INFO.get("site_service_domain")

        if not TakaheDomain.objects.filter(domain=domain).exists():
            logger.info(f"Domain {domain} not found, creating...")
            self.create_site(domain, service_domain)
            if (
                TakaheIdentity.objects.filter(local=True)
                .exclude(domain_id__isnull=True)
                .exists()
            ):
                logger.warning(
                    "Local identities are found for other domains, there might be a configuration issue."
                )

        TakaheConfig.objects.update_or_create(
            key="site_name",
            user=None,
            identity=None,
            domain=None,
            defaults={"json": name},
        )
        TakaheConfig.objects.update_or_create(
            key="site_name",
            user=None,
            identity=None,
            domain_id=domain,
            defaults={"json": name},
        )
        TakaheConfig.objects.update_or_create(
            key="site_icon",
            user=None,
            identity=None,
            domain_id=None,
            defaults={"json": icon},
        )
        TakaheConfig.objects.update_or_create(
            key="site_icon",
            user=None,
            identity=None,
            domain_id=domain,
            defaults={"json": icon},
        )

    def sync_admin_user(self):
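        """Promote users listed in SETUP_ADMIN_USERNAMES to superuser and Takahe admin."""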
        users = User.objects.filter(username__in=settings.SETUP_ADMIN_USERNAMES)
        for user in users:
            if user.is_superuser:
                logger.debug(f"User {user.username} is already admin")
            else:
                user.is_superuser = True
                user.save(update_fields=["is_superuser"])
                TakaheUser.objects.filter(email=f"@{user.username}").update(admin=True)
                logger.info(f"Updated user {user.username} as admin")

    def sync_relay(self):
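        """Subscribe to or leave the default relay, depending on DISABLE_DEFAULT_RELAY."""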
        relay = TakaheRelay.objects.filter(
            state__in=["new", "subscribing", "subscribed"],
            inbox_uri=settings.DEFAULT_RELAY_SERVER,
        ).first()
        if settings.DISABLE_DEFAULT_RELAY:
            if relay:
                logger.info("Default relay is disabled, unsubscribing...")
                Takahe.update_state(relay, "unsubscribing")
            else:
                logger.info("Default relay is disabled.")
        else:
            if relay:
                logger.debug(f"Default relay is enabled, state: {relay.state}")
            else:
                logger.info("Default relay is enabled, subscribing...")
                TakaheRelay.objects.update_or_create(
                    inbox_uri=settings.DEFAULT_RELAY_SERVER,
                    defaults={"state": "new"},
                )

    def run(self):
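        """Run all post-migration setup steps: site config, admin users, relay, search index, cron jobs."""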
        logger.info("Running post-migration setup...")

        # Update site name if changed
        self.sync_site_config()

        # Create/update admin user if configured in env
        self.sync_admin_user()

        # Subscribe to default relay if enabled
        self.sync_relay()

        # Create basic emoji if not exists

        # Create search index if not exists
        Indexer.init()

        # Register cron jobs if not yet
        if settings.DISABLE_CRON_JOBS and "*" in settings.DISABLE_CRON_JOBS:
            logger.info("Cron jobs are disabled.")
            JobManager.cancel_all()
        else:
            JobManager.reschedule_all()

        logger.info("Finished post-migration setup.")

    def check(self):
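        """Return Django system-check errors/warnings covering env, Redis, the search index and the Takahe database."""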
        from redis import Redis

        errors = []
        # check env
        domain = settings.SITE_INFO.get("site_domain")
        if not domain:
            errors.append(
                Error(
                    "SITE DOMAIN is not specified",
                    hint="Check NEODB_SITE_DOMAIN in .env",
                    id="neodb.E001",
                )
            )
        # check redis
        try:
            redis = Redis.from_url(settings.REDIS_URL)
            if not redis:
                raise Exception("Redis unavailable")
            redis.ping()
        except Exception as e:
            errors.append(
                Error(
                    f"Error while connecting to redis: {e}",
                    hint="Check NEODB_REDIS_URL/TAKAHE_CACHES_DEFAULT in .env",
                    id="neodb.E002",
                )
            )
        # check indexer
        try:
            Indexer.check()
        except Exception as e:
            errors.append(
                Error(
                    f"Error while connecting to search index server: {e}",
                    hint='Check NEODB_SEARCH_URL in .env, and run "neodb-manage migration"',
                    id="neodb.E003",
                )
            )
        # check takahe
        try:
            if not TakaheDomain.objects.filter(domain=domain).exists():
                errors.append(
                    Warning(
                        f"Domain {domain} not found in takahe database",
                        hint="Run migration once to create the domain",
                        id="neodb.W001",
                    )
                )
        except Exception as e:
            errors.append(
                Error(
                    f"Error while querying takahe database: {e}",
                    hint='Check TAKAHE_DB_URL/TAKAHE_DATABASE_SERVER in .env, and run "takahe-manage migration"',
                    id="neodb.E004",
                )
            )
        return errors
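
# A minimal invocation sketch (an assumption, not part of this module): these
# steps are expected to be triggered after database migrations, e.g. by the
# management command referenced in the "neodb-manage migration" hint above.
#
#   setup = Setup()
#   problems = setup.check()  # django.core.checks Error/Warning instances
#   if not problems:
#       setup.run()           # designed to be re-run after every migration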