From 63b1fbe5b49197bbda23ca342c95ed3b0c2efce8 Mon Sep 17 00:00:00 2001 From: Your Name Date: Fri, 20 Oct 2023 01:11:14 -0400 Subject: [PATCH 01/43] fix 500 --- catalog/tv/models.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/catalog/tv/models.py b/catalog/tv/models.py index 467234d5..525f7e27 100644 --- a/catalog/tv/models.py +++ b/catalog/tv/models.py @@ -437,7 +437,7 @@ class TVEpisode(Item): @property def display_title(self): - return f"{self.season.display_title} 第{self.episode_number}集" # TODO i18n + return f"{self.season.display_title if self.season else ''} 第{self.episode_number}集" # TODO i18n @property def parent_item(self): From 239ad4271a262e8dadc69ae0afaa1a7834986681 Mon Sep 17 00:00:00 2001 From: Your Name Date: Thu, 20 Jul 2023 21:59:49 -0400 Subject: [PATCH 02/43] takahe integration --- .github/workflows/django.yml | 26 +- boofilsic/settings.py | 32 +- catalog/book/models.py | 2 +- catalog/common/__init__.py | 1 + catalog/common/downloaders.py | 15 +- catalog/common/models.py | 62 +- catalog/common/sites.py | 40 +- catalog/common/utils.py | 2 +- catalog/management/commands/cat.py | 15 +- catalog/management/commands/crawl.py | 15 +- catalog/management/commands/discover.py | 6 +- catalog/management/commands/index.py | 4 +- ...alter_externalresource_id_type_and_more.py | 117 ++ catalog/search/external.py | 3 +- catalog/search/typesense.py | 5 +- catalog/search/views.py | 5 + catalog/sites/__init__.py | 5 +- catalog/sites/fedi.py | 101 ++ catalog/sites/rss.py | 5 +- catalog/templates/_item_card.html | 2 +- .../templates/_item_card_metadata_base.html | 2 +- catalog/templates/_item_comments.html | 2 +- .../templates/_item_comments_by_episode.html | 2 +- catalog/templates/_item_reviews.html | 2 +- catalog/templates/_item_user_pieces.html | 6 +- catalog/templates/_sidebar_edit.html | 2 +- catalog/templates/item_base.html | 2 +- catalog/templates/item_mark_list.html | 2 +- catalog/templates/item_review_list.html | 2 +- catalog/urls.py | 5 
+- catalog/views.py | 30 +- common/static/scss/_sitelabel.scss | 6 + common/templates/_sidebar.html | 2 +- common/templatetags/mastodon.py | 27 +- common/urls.py | 3 +- common/utils.py | 15 +- common/views.py | 6 +- doc/install.md | 2 +- journal/api.py | 23 +- journal/exporters/doufen.py | 14 +- journal/importers/douban.py | 2 +- journal/management/commands/journal.py | 5 +- ...ions_piece_local_piece_post_id_and_more.py | 50 + ...r_collection_featured_by_users_and_more.py | 111 ++ journal/models/__init__.py | 10 +- journal/models/collection.py | 24 +- journal/models/comment.py | 44 +- journal/models/common.py | 100 +- journal/models/itemlist.py | 21 +- journal/models/like.py | 26 +- journal/models/mark.py | 97 +- journal/models/mixins.py | 50 +- journal/models/rating.py | 61 +- journal/models/renderers.py | 8 +- journal/models/review.py | 18 +- journal/models/shelf.py | 85 +- journal/models/tag.py | 42 +- journal/models/utils.py | 37 +- journal/templates/_list_item.html | 4 +- journal/templates/profile.html | 4 +- journal/templates/review.html | 2 +- journal/templates/user_collection_list.html | 2 +- journal/templatetags/collection.py | 16 +- journal/templatetags/user_actions.py | 13 +- journal/tests.py | 94 +- journal/views/collection.py | 109 +- journal/views/common.py | 66 +- journal/views/mark.py | 59 +- journal/views/profile.py | 61 +- journal/views/review.py | 33 +- journal/views/tag.py | 26 +- mastodon/api.py | 12 +- pyproject.toml | 2 +- requirements-dev.txt | 1 + requirements.txt | 4 +- .../0007_alter_localactivity_owner.py | 22 + social/models.py | 18 +- .../activity/comment_child_item.html | 2 +- social/templates/activity/mark_item.html | 2 +- social/templates/activity/review_item.html | 2 +- social/tests.py | 95 +- social/views.py | 5 +- takahe/__init__.py | 0 takahe/admin.py | 3 + takahe/ap_handlers.py | 123 ++ takahe/apps.py | 6 + takahe/db_routes.py | 27 + takahe/html.py | 379 +++++ takahe/management/commands/takahe.py | 42 + 
takahe/migrations/0001_initial.py | 489 ++++++ takahe/migrations/__init__.py | 0 takahe/models.py | 1395 +++++++++++++++++ takahe/tests.py | 3 + takahe/uris.py | 89 ++ takahe/utils.py | 486 ++++++ takahe/views.py | 3 + users/account.py | 5 +- .../management/commands/refresh_following.py | 21 - users/migrations/0012_apidentity.py | 63 + users/migrations/0013_init_identity.py | 77 + users/models/__init__.py | 1 + users/models/apidentity.py | 192 +++ users/models/preference.py | 1 + users/models/report.py | 15 - users/models/user.py | 334 +--- users/tasks.py | 2 - users/tests.py | 180 +-- 107 files changed, 4872 insertions(+), 1127 deletions(-) create mode 100644 catalog/migrations/0011_alter_externalresource_id_type_and_more.py create mode 100644 catalog/sites/fedi.py create mode 100644 journal/migrations/0014_alter_piece_options_piece_local_piece_post_id_and_more.py create mode 100644 journal/migrations/0015_alter_collection_featured_by_users_and_more.py create mode 100644 social/migrations/0007_alter_localactivity_owner.py create mode 100644 takahe/__init__.py create mode 100644 takahe/admin.py create mode 100644 takahe/ap_handlers.py create mode 100644 takahe/apps.py create mode 100644 takahe/db_routes.py create mode 100644 takahe/html.py create mode 100644 takahe/management/commands/takahe.py create mode 100644 takahe/migrations/0001_initial.py create mode 100644 takahe/migrations/__init__.py create mode 100644 takahe/models.py create mode 100644 takahe/tests.py create mode 100644 takahe/uris.py create mode 100644 takahe/utils.py create mode 100644 takahe/views.py delete mode 100644 users/management/commands/refresh_following.py create mode 100644 users/migrations/0012_apidentity.py create mode 100644 users/migrations/0013_init_identity.py create mode 100644 users/models/apidentity.py diff --git a/.github/workflows/django.yml b/.github/workflows/django.yml index 1e7a948e..fe7611dc 100644 --- a/.github/workflows/django.yml +++ b/.github/workflows/django.yml @@ 
-1,4 +1,4 @@ -name: all tests +name: tests on: push: @@ -6,8 +6,7 @@ on: branches: [ "main" ] jobs: - build: - + django: runs-on: ubuntu-latest services: redis: @@ -15,20 +14,25 @@ jobs: ports: - 6379:6379 db: - image: postgres:12.13-alpine + image: postgres env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: admin123 - POSTGRES_DB: test + POSTGRES_USER: testuser + POSTGRES_PASSWORD: testpass + POSTGRES_DB: test_neodb ports: - 5432:5432 - options: --mount type=tmpfs,destination=/var/lib/postgresql/data --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - + db2: + image: postgres + env: + POSTGRES_USER: testuser + POSTGRES_PASSWORD: testpass + POSTGRES_DB: test_neodb_takahe + ports: + - 15432:5432 strategy: max-parallel: 4 matrix: - python-version: ['3.10', '3.11'] - + python-version: ['3.11'] steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} diff --git a/boofilsic/settings.py b/boofilsic/settings.py index 4a2d965d..6e23f630 100644 --- a/boofilsic/settings.py +++ b/boofilsic/settings.py @@ -1,6 +1,11 @@ import os +# import django_stubs_ext + +# django_stubs_ext.monkeypatch() + NEODB_VERSION = "0.8" +DATABASE_ROUTERS = ["takahe.db_routes.TakaheRouter"] PROJECT_ROOT = os.path.abspath(os.path.dirname(__name__)) @@ -65,6 +70,7 @@ "journal.apps.JournalConfig", "social.apps.SocialConfig", "developer.apps.DeveloperConfig", + "takahe.apps.TakaheConfig", "legacy.apps.LegacyConfig", ] @@ -110,6 +116,8 @@ WSGI_APPLICATION = "boofilsic.wsgi.application" +SESSION_COOKIE_NAME = "neodbsid" + CACHES = { "default": { "BACKEND": "django.core.cache.backends.locmem.LocMemCache", @@ -131,7 +139,25 @@ "client_encoding": "UTF8", # 'isolation_level': psycopg2.extensions.ISOLATION_LEVEL_DEFAULT, }, - } + "TEST": { + "DEPENDENCIES": ["takahe"], + }, + }, + "takahe": { + "ENGINE": "django.db.backends.postgresql", + "NAME": os.environ.get("TAKAHE_DB_NAME", "test_neodb_takahe"), + "USER": os.environ.get("TAKAHE_DB_USER", 
"testuser"), + "PASSWORD": os.environ.get("TAKAHE_DB_PASSWORD", "testpass"), + "HOST": os.environ.get("TAKAHE_DB_HOST", "127.0.0.1"), + "PORT": os.environ.get("TAKAHE_DB_PORT", 15432), + "OPTIONS": { + "client_encoding": "UTF8", + # 'isolation_level': psycopg2.extensions.ISOLATION_LEVEL_DEFAULT, + }, + "TEST": { + "DEPENDENCIES": [], + }, + }, } # Customized auth backend, glue OAuth2 and Django User model together @@ -189,6 +215,8 @@ SILENCED_SYSTEM_CHECKS = [ "admin.E404", # Required by django-user-messages + "models.W035", # Required by takahe: identical table name in different database + "fields.W344", # Required by takahe: identical table name in different database ] MEDIA_URL = "/media/" @@ -358,6 +386,7 @@ if os.environ.get("NEODB_TYPESENSE_ENABLE", ""): SEARCH_BACKEND = "TYPESENSE" +TYPESENSE_INDEX_NAME = "catalog" TYPESENSE_CONNECTION = { "api_key": os.environ.get("NEODB_TYPESENSE_KEY", "insecure"), "nodes": [ @@ -371,6 +400,7 @@ } +DOWNLOADER_CACHE_TIMEOUT = 300 DOWNLOADER_RETRIES = 3 DOWNLOADER_SAVEDIR = None DISABLE_MODEL_SIGNAL = False # disable index and social feeds during importing/etc diff --git a/catalog/book/models.py b/catalog/book/models.py index 5be14f2d..d451b9bf 100644 --- a/catalog/book/models.py +++ b/catalog/book/models.py @@ -166,7 +166,7 @@ def update_linked_items_from_external_resource(self, resource): """add Work from resource.metadata['work'] if not yet""" links = resource.required_resources + resource.related_resources for w in links: - if w["model"] == "Work": + if w.get("model") == "Work": work = Work.objects.filter( primary_lookup_id_type=w["id_type"], primary_lookup_id_value=w["id_value"], diff --git a/catalog/common/__init__.py b/catalog/common/__init__.py index 51f055af..fe04dde8 100644 --- a/catalog/common/__init__.py +++ b/catalog/common/__init__.py @@ -24,6 +24,7 @@ "use_local_response", "RetryDownloader", "BasicDownloader", + "CachedDownloader", "ProxiedDownloader", "BasicImageDownloader", "ProxiedImageDownloader", diff 
--git a/catalog/common/downloaders.py b/catalog/common/downloaders.py index a579a1e8..31d0507d 100644 --- a/catalog/common/downloaders.py +++ b/catalog/common/downloaders.py @@ -10,6 +10,7 @@ import filetype import requests from django.conf import settings +from django.core.cache import cache from lxml import html from PIL import Image from requests import Response @@ -153,7 +154,6 @@ def validate_response(self, response): def _download(self, url) -> Tuple[DownloaderResponse | MockResponse, int]: try: if not _mock_mode: - # TODO cache = get/set from redis resp = requests.get( url, headers=self.headers, timeout=self.get_timeout() ) @@ -256,6 +256,19 @@ def download(self): raise DownloadError(self, "max out of retries") +class CachedDownloader(BasicDownloader): + def download(self): + cache_key = "dl:" + self.url + resp = cache.get(cache_key) + if resp: + self.response_type = RESPONSE_OK + else: + resp = super().download() + if self.response_type == RESPONSE_OK: + cache.set(cache_key, resp, timeout=settings.DOWNLOADER_CACHE_TIMEOUT) + return resp + + class ImageDownloaderMixin: def __init__(self, url, referer=None): self.extention = None diff --git a/catalog/common/models.py b/catalog/common/models.py index e0818efa..cea95d55 100644 --- a/catalog/common/models.py +++ b/catalog/common/models.py @@ -13,7 +13,7 @@ from django.utils import timezone from django.utils.baseconv import base62 from django.utils.translation import gettext_lazy as _ -from ninja import Schema +from ninja import Field, Schema from polymorphic.models import PolymorphicModel from catalog.common import jsondata @@ -46,6 +46,7 @@ class SiteName(models.TextChoices): RSS = "rss", _("RSS") Discogs = "discogs", _("Discogs") AppleMusic = "apple_music", _("苹果音乐") + Fediverse = "fedi", _("联邦实例") class IdType(models.TextChoices): @@ -90,6 +91,7 @@ class IdType(models.TextChoices): Bangumi = "bangumi", _("Bangumi") ApplePodcast = "apple_podcast", _("苹果播客") AppleMusic = "apple_music", _("苹果音乐") + Fediverse = 
"fedi", _("联邦实例") IdealIdTypes = [ @@ -225,6 +227,8 @@ class ExternalResourceSchema(Schema): class BaseSchema(Schema): + id: str = Field(alias="absolute_url") + type: str = Field(alias="ap_object_type") uuid: str url: str api_url: str @@ -250,7 +254,7 @@ class Item(SoftDeleteMixin, PolymorphicModel): url_path = "item" # subclass must specify this type = None # subclass must specify this parent_class = None # subclass may specify this to allow create child item - category: ItemCategory | None = None # subclass must specify this + category: ItemCategory # subclass must specify this demonstrative: "_StrOrPromise | None" = None # subclass must specify this uid = models.UUIDField(default=uuid.uuid4, editable=False, db_index=True) title = models.CharField(_("标题"), max_length=1000, default="") @@ -345,6 +349,25 @@ def set_parent_item(self, value): def parent_uuid(self): return self.parent_item.uuid if self.parent_item else None + @classmethod + def get_ap_object_type(cls): + return cls.__name__ + + @property + def ap_object_type(self): + return self.get_ap_object_type() + + @property + def ap_object_ref(self): + o = { + "type": self.get_ap_object_type(), + "url": self.absolute_url, + "name": self.title, + } + if self.has_cover(): + o["image"] = self.cover_image_url + return o + def log_action(self, changes): LogEntry.objects.log_create( self, action=LogEntry.Action.UPDATE, changes=changes @@ -561,10 +584,13 @@ class ExternalResource(models.Model): edited_time = models.DateTimeField(auto_now=True) required_resources = jsondata.ArrayField( models.CharField(), null=False, blank=False, default=list - ) + ) # links required to generate Item from this resource, e.g. parent TVShow of TVSeason related_resources = jsondata.ArrayField( models.CharField(), null=False, blank=False, default=list - ) + ) # links related to this resource which may be fetched later, e.g. 
sub TVSeason of TVShow + prematched_resources = jsondata.ArrayField( + models.CharField(), null=False, blank=False, default=list + ) # links to help match an existing Item from this resource class Meta: unique_together = [["id_type", "id_value"]] @@ -585,13 +611,24 @@ def get_site(self): return SiteManager.get_site_cls_by_id_type(self.id_type) @property - def site_name(self): + def site_name(self) -> SiteName: try: - return self.get_site().SITE_NAME + site = self.get_site() + return site.SITE_NAME if site else SiteName.Unknown except: _logger.warning(f"Unknown site for {self}") return SiteName.Unknown + @property + def site_label(self): + if self.id_type == IdType.Fediverse: + from takahe.utils import Takahe + + domain = self.id_value.split("://")[1].split("/")[0] + n = Takahe.get_node_name_for_domain(domain) + return n or domain + return self.site_name.label + def update_content(self, resource_content): self.other_lookup_ids = resource_content.lookup_ids self.metadata = resource_content.metadata @@ -615,7 +652,16 @@ def get_all_lookup_ids(self): d = {k: v for k, v in d.items() if bool(v)} return d - def get_preferred_model(self) -> type[Item] | None: + def get_lookup_ids(self, default_model): + lookup_ids = self.get_all_lookup_ids() + model = self.get_item_model(default_model) + bt, bv = model.get_best_lookup_id(lookup_ids) + ids = [(t, v) for t, v in lookup_ids.items() if t and v and t != bt] + if bt and bv: + ids = [(bt, bv)] + ids + return ids + + def get_item_model(self, default_model: type[Item]) -> type[Item]: model = self.metadata.get("preferred_model") if model: m = ContentType.objects.filter( @@ -625,7 +671,7 @@ def get_preferred_model(self) -> type[Item] | None: return cast(Item, m).model_class() else: raise ValueError(f"preferred model {model} does not exist") - return None + return default_model _CONTENT_TYPE_LIST = None diff --git a/catalog/common/sites.py b/catalog/common/sites.py index d6b6f11e..1777864b 100644 --- a/catalog/common/sites.py +++ 
b/catalog/common/sites.py @@ -39,7 +39,7 @@ class AbstractSite: Abstract class to represent a site """ - SITE_NAME: SiteName | None = None + SITE_NAME: SiteName ID_TYPE: IdType | None = None WIKI_PROPERTY_ID: str | None = "P0undefined0" DEFAULT_MODEL: Type[Item] | None = None @@ -104,18 +104,29 @@ def query_str(content, query: str) -> str: return content.xpath(query)[0].strip() @classmethod - def get_model_for_resource(cls, resource): - model = resource.get_preferred_model() - return model or cls.DEFAULT_MODEL + def match_existing_item_for_resource( + cls, resource: ExternalResource + ) -> Item | None: + """ + try match an existing Item for a given ExternalResource - @classmethod - def match_existing_item_for_resource(cls, resource) -> Item | None: - model = cls.get_model_for_resource(resource) + order of matching: + 1. look for other ExternalResource by url in prematched_resources, if found, return the item + 2. look for Item by primary_lookup_id_type and primary_lookup_id_value + + """ + for resource_link in resource.prematched_resources: # type: ignore + url = resource_link.get("url") + if url: + matched_resource = ExternalResource.objects.filter(url=url).first() + if matched_resource and matched_resource.item: + return matched_resource.item + model = resource.get_item_model(cls.DEFAULT_MODEL) if not model: return None - t, v = model.get_best_lookup_id(resource.get_all_lookup_ids()) - matched = None - if t is not None: + ids = resource.get_lookup_ids(cls.DEFAULT_MODEL) + for t, v in ids: + matched = None matched = model.objects.filter( primary_lookup_id_type=t, primary_lookup_id_value=v, @@ -143,14 +154,15 @@ def match_existing_item_for_resource(cls, resource) -> Item | None: matched.primary_lookup_id_type = t matched.primary_lookup_id_value = v matched.save() - return matched + if matched: + return matched @classmethod def match_or_create_item_for_resource(cls, resource): previous_item = resource.item resource.item = 
cls.match_existing_item_for_resource(resource) or previous_item if resource.item is None: - model = cls.get_model_for_resource(resource) + model = resource.get_item_model(cls.DEFAULT_MODEL) if not model: return None t, v = model.get_best_lookup_id(resource.get_all_lookup_ids()) @@ -243,7 +255,7 @@ def get_resource_ready( ) else: _logger.error(f"unable to get site for {linked_url}") - if p.related_resources: + if p.related_resources or p.prematched_resources: django_rq.get_queue("crawl").enqueue(crawl_related_resources_task, p.pk) if p.item: p.item.update_linked_items_from_external_resource(p) @@ -318,7 +330,7 @@ def crawl_related_resources_task(resource_pk): if not resource: _logger.warn(f"crawl resource not found {resource_pk}") return - links = resource.related_resources + links = (resource.related_resources or []) + (resource.prematched_resources or []) # type: ignore for w in links: # type: ignore try: item = None diff --git a/catalog/common/utils.py b/catalog/common/utils.py index 0882af5d..08023c09 100644 --- a/catalog/common/utils.py +++ b/catalog/common/utils.py @@ -36,4 +36,4 @@ def piece_cover_path(item, filename): + "." 
+ filename.split(".")[-1] ) - return f"user/{item.owner_id}/{fn}" + return f"user/{item.owner_id or '_'}/{fn}" diff --git a/catalog/management/commands/cat.py b/catalog/management/commands/cat.py index 6fddbc65..f2c13cd1 100644 --- a/catalog/management/commands/cat.py +++ b/catalog/management/commands/cat.py @@ -31,10 +31,17 @@ def handle(self, *args, **options): self.stdout.write(f"Fetching from {site}") if options["save"]: resource = site.get_resource_ready(ignore_existing_content=options["force"]) - pprint.pp(resource.metadata) - pprint.pp(site.get_item()) - pprint.pp(site.get_item().cover) - pprint.pp(site.get_item().metadata) + if resource: + pprint.pp(resource.metadata) + else: + self.stdout.write(self.style.ERROR(f"Unable to get resource for {url}")) + item = site.get_item() + if item: + pprint.pp(item.cover) + pprint.pp(item.metadata) + pprint.pp(item.absolute_url) + else: + self.stdout.write(self.style.ERROR(f"Unable to get item for {url}")) else: resource = site.scrape() pprint.pp(resource.metadata) diff --git a/catalog/management/commands/crawl.py b/catalog/management/commands/crawl.py index cacc368f..241fb0a8 100644 --- a/catalog/management/commands/crawl.py +++ b/catalog/management/commands/crawl.py @@ -29,16 +29,19 @@ def handle(self, *args, **options): logger.info(f"Navigating {url}") content = ProxiedDownloader(url).download().html() urls = content.xpath("//a/@href") - for _u in urls: + for _u in urls: # type:ignore u = urljoin(url, _u) if u not in history and u not in queue: if len([p for p in item_patterns if re.match(p, u)]) > 0: site = SiteManager.get_site_by_url(u) - u = site.url - if u not in history: - history.append(u) - logger.info(f"Fetching {u}") - site.get_resource_ready() + if site: + u = site.url + if u not in history: + history.append(u) + logger.info(f"Fetching {u}") + site.get_resource_ready() + else: + logger.warning(f"unable to parse {u}") elif pattern and u.find(pattern) >= 0: queue.append(u) logger.info("Crawl finished.") diff 
--git a/catalog/management/commands/discover.py b/catalog/management/commands/discover.py index 32e316ba..d259189a 100644 --- a/catalog/management/commands/discover.py +++ b/catalog/management/commands/discover.py @@ -7,7 +7,7 @@ from loguru import logger from catalog.models import * -from journal.models import Comment, ShelfMember, query_item_category +from journal.models import Comment, ShelfMember, q_item_in_category MAX_ITEMS_PER_PERIOD = 12 MIN_MARKS = 2 @@ -28,7 +28,7 @@ def add_arguments(self, parser): def get_popular_marked_item_ids(self, category, days, exisiting_ids): item_ids = [ m["item_id"] - for m in ShelfMember.objects.filter(query_item_category(category)) + for m in ShelfMember.objects.filter(q_item_in_category(category)) .filter(created_time__gt=timezone.now() - timedelta(days=days)) .exclude(item_id__in=exisiting_ids) .values("item_id") @@ -40,7 +40,7 @@ def get_popular_marked_item_ids(self, category, days, exisiting_ids): def get_popular_commented_podcast_ids(self, days, exisiting_ids): return list( - Comment.objects.filter(query_item_category(ItemCategory.Podcast)) + Comment.objects.filter(q_item_in_category(ItemCategory.Podcast)) .filter(created_time__gt=timezone.now() - timedelta(days=days)) .annotate(p=F("item__podcastepisode__program")) .filter(p__isnull=False) diff --git a/catalog/management/commands/index.py b/catalog/management/commands/index.py index d7916761..1a07cb59 100644 --- a/catalog/management/commands/index.py +++ b/catalog/management/commands/index.py @@ -1,6 +1,7 @@ import pprint from datetime import timedelta from time import sleep +from typing import TYPE_CHECKING from django.conf import settings from django.core.management.base import BaseCommand @@ -8,7 +9,8 @@ from django.utils import timezone from tqdm import tqdm -from catalog.models import * +from catalog.models import Item +from catalog.search.typesense import Indexer BATCH_SIZE = 1000 diff --git a/catalog/migrations/0011_alter_externalresource_id_type_and_more.py 
b/catalog/migrations/0011_alter_externalresource_id_type_and_more.py new file mode 100644 index 00000000..3659a6d5 --- /dev/null +++ b/catalog/migrations/0011_alter_externalresource_id_type_and_more.py @@ -0,0 +1,117 @@ +# Generated by Django 4.2.3 on 2023-08-06 02:01 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("catalog", "0011_remove_item_last_editor"), + ] + + operations = [ + migrations.AlterField( + model_name="externalresource", + name="id_type", + field=models.CharField( + choices=[ + ("wikidata", "维基数据"), + ("isbn10", "ISBN10"), + ("isbn", "ISBN"), + ("asin", "ASIN"), + ("issn", "ISSN"), + ("cubn", "统一书号"), + ("isrc", "ISRC"), + ("gtin", "GTIN UPC EAN码"), + ("rss", "RSS Feed URL"), + ("imdb", "IMDb"), + ("tmdb_tv", "TMDB剧集"), + ("tmdb_tvseason", "TMDB剧集"), + ("tmdb_tvepisode", "TMDB剧集"), + ("tmdb_movie", "TMDB电影"), + ("goodreads", "Goodreads"), + ("goodreads_work", "Goodreads著作"), + ("googlebooks", "谷歌图书"), + ("doubanbook", "豆瓣读书"), + ("doubanbook_work", "豆瓣读书著作"), + ("doubanmovie", "豆瓣电影"), + ("doubanmusic", "豆瓣音乐"), + ("doubangame", "豆瓣游戏"), + ("doubandrama", "豆瓣舞台剧"), + ("doubandrama_version", "豆瓣舞台剧版本"), + ("bookstw", "博客来图书"), + ("bandcamp", "Bandcamp"), + ("spotify_album", "Spotify专辑"), + ("spotify_show", "Spotify播客"), + ("discogs_release", "Discogs Release"), + ("discogs_master", "Discogs Master"), + ("musicbrainz", "MusicBrainz ID"), + ("doubanbook_author", "豆瓣读书作者"), + ("doubanmovie_celebrity", "豆瓣电影影人"), + ("goodreads_author", "Goodreads作者"), + ("spotify_artist", "Spotify艺术家"), + ("tmdb_person", "TMDB影人"), + ("igdb", "IGDB游戏"), + ("steam", "Steam游戏"), + ("bangumi", "Bangumi"), + ("apple_podcast", "苹果播客"), + ("apple_music", "苹果音乐"), + ("fedi", "联邦实例"), + ], + max_length=50, + verbose_name="IdType of the source site", + ), + ), + migrations.AlterField( + model_name="itemlookupid", + name="id_type", + field=models.CharField( + blank=True, + choices=[ + ("wikidata", "维基数据"), + ("isbn10", 
"ISBN10"), + ("isbn", "ISBN"), + ("asin", "ASIN"), + ("issn", "ISSN"), + ("cubn", "统一书号"), + ("isrc", "ISRC"), + ("gtin", "GTIN UPC EAN码"), + ("rss", "RSS Feed URL"), + ("imdb", "IMDb"), + ("tmdb_tv", "TMDB剧集"), + ("tmdb_tvseason", "TMDB剧集"), + ("tmdb_tvepisode", "TMDB剧集"), + ("tmdb_movie", "TMDB电影"), + ("goodreads", "Goodreads"), + ("goodreads_work", "Goodreads著作"), + ("googlebooks", "谷歌图书"), + ("doubanbook", "豆瓣读书"), + ("doubanbook_work", "豆瓣读书著作"), + ("doubanmovie", "豆瓣电影"), + ("doubanmusic", "豆瓣音乐"), + ("doubangame", "豆瓣游戏"), + ("doubandrama", "豆瓣舞台剧"), + ("doubandrama_version", "豆瓣舞台剧版本"), + ("bookstw", "博客来图书"), + ("bandcamp", "Bandcamp"), + ("spotify_album", "Spotify专辑"), + ("spotify_show", "Spotify播客"), + ("discogs_release", "Discogs Release"), + ("discogs_master", "Discogs Master"), + ("musicbrainz", "MusicBrainz ID"), + ("doubanbook_author", "豆瓣读书作者"), + ("doubanmovie_celebrity", "豆瓣电影影人"), + ("goodreads_author", "Goodreads作者"), + ("spotify_artist", "Spotify艺术家"), + ("tmdb_person", "TMDB影人"), + ("igdb", "IGDB游戏"), + ("steam", "Steam游戏"), + ("bangumi", "Bangumi"), + ("apple_podcast", "苹果播客"), + ("apple_music", "苹果音乐"), + ("fedi", "联邦实例"), + ], + max_length=50, + verbose_name="源网站", + ), + ), + ] diff --git a/catalog/search/external.py b/catalog/search/external.py index 51da0806..338eb471 100644 --- a/catalog/search/external.py +++ b/catalog/search/external.py @@ -23,7 +23,8 @@ def __init__( "all": [ { "url": source_url, - "site_name": {"label": source_site, "value": source_site}, + "site_name": source_site, + "site_label": source_site, } ] } diff --git a/catalog/search/typesense.py b/catalog/search/typesense.py index 61b3e32c..57816a12 100644 --- a/catalog/search/typesense.py +++ b/catalog/search/typesense.py @@ -14,7 +14,6 @@ from catalog.models import Item -INDEX_NAME = "catalog" SEARCHABLE_ATTRIBUTES = [ "title", "orig_title", @@ -125,7 +124,7 @@ class Indexer: def instance(cls) -> Collection: if cls._instance is None: cls._instance = 
typesense.Client(settings.TYPESENSE_CONNECTION).collections[ - INDEX_NAME + settings.TYPESENSE_INDEX_NAME ] return cls._instance # type: ignore @@ -178,7 +177,7 @@ def config(cls): {"name": ".*", "optional": True, "locale": "zh", "type": "auto"}, ] return { - "name": INDEX_NAME, + "name": settings.TYPESENSE_INDEX_NAME, "fields": fields, # "default_sorting_field": "rating_count", } diff --git a/catalog/search/views.py b/catalog/search/views.py index d3ab3780..4af3c393 100644 --- a/catalog/search/views.py +++ b/catalog/search/views.py @@ -130,9 +130,14 @@ def search(request): ) if keywords.find("://") > 0: + host = keywords.split("://")[1].split("/")[0] + if host == settings.SITE_INFO["site_domain"]: + return redirect(keywords) site = SiteManager.get_site_by_url(keywords) if site: return fetch(request, keywords, False, site) + if request.GET.get("r"): + return redirect(keywords) items, num_pages, _, dup_items = query_index(keywords, categories, tag, p) return render( diff --git a/catalog/sites/__init__.py b/catalog/sites/__init__.py index 6fb4f868..7518ebfb 100644 --- a/catalog/sites/__init__.py +++ b/catalog/sites/__init__.py @@ -9,13 +9,14 @@ from .douban_game import DoubanGame from .douban_movie import DoubanMovie from .douban_music import DoubanMusic +from .fedi import FediverseInstance from .goodreads import Goodreads from .google_books import GoogleBooks from .igdb import IGDB from .imdb import IMDB - -# from .apple_podcast import ApplePodcast from .rss import RSS from .spotify import Spotify from .steam import Steam from .tmdb import TMDB_Movie + +# from .apple_podcast import ApplePodcast diff --git a/catalog/sites/fedi.py b/catalog/sites/fedi.py new file mode 100644 index 00000000..958b33ec --- /dev/null +++ b/catalog/sites/fedi.py @@ -0,0 +1,101 @@ +import re + +from django.core.validators import URLValidator +from loguru import logger + +from catalog.common import * +from catalog.models import * + + +@SiteManager.register +class 
FediverseInstance(AbstractSite): + SITE_NAME = SiteName.Fediverse + ID_TYPE = IdType.Fediverse + URL_PATTERNS = [] + WIKI_PROPERTY_ID = "" + DEFAULT_MODEL = None + id_type_mapping = { + "isbn": IdType.ISBN, + "imdb": IdType.IMDB, + "barcode": IdType.GTIN, + } + supported_types = { + "Book": Edition, + "Movie": Movie, + "TVShow": TVShow, + "TVSeason": TVSeason, + "TVEpisode": TVEpisode, + "Album": Album, + "Game": Game, + "Podcast": Podcast, + "Performance": Performance, + "PerformanceProduction": PerformanceProduction, + } + request_header = {"User-Agent": "NeoDB/0.5", "Accept": "application/activity+json"} + + @classmethod + def id_to_url(cls, id_value): + return id_value + + @classmethod + def url_to_id(cls, url: str): + u = url.split("://", 1)[1].split("/", 1) + return "https://" + u[0].lower() + "/" + u[1] + + @classmethod + def validate_url_fallback(cls, url): + val = URLValidator() + try: + val(url) + if ( + url.split("://", 1)[1].split("/", 1)[0].lower() + == settings.SITE_INFO["site_domain"] + ): + # disallow local instance URLs + return False + return cls.get_json_from_url(url) is not None + except Exception: + return False + + @classmethod + def get_json_from_url(cls, url): + j = CachedDownloader(url, headers=cls.request_header).download().json() + if j.get("type") not in cls.supported_types.keys(): + raise ValueError("Not a supported format or type") + if j.get("id") != url: + logger.warning(f"ID mismatch: {j.get('id')} != {url}") + return j + + def scrape(self): + data = self.get_json_from_url(self.url) + img_url = data.get("cover_image_url") + raw_img, img_ext = ( + BasicImageDownloader.download_image(img_url, None, headers={}) + if img_url + else (None, None) + ) + ids = {} + data["preferred_model"] = data.get("type") + data["prematched_resources"] = [] + for ext in data.get("external_resources", []): + site = SiteManager.get_site_by_url(ext.get("url")) + if site and site.ID_TYPE != self.ID_TYPE: + ids[site.ID_TYPE] = site.id_value + 
data["prematched_resources"].append( + { + "model": data["preferred_model"], + "id_type": site.ID_TYPE, + "id_value": site.id_value, + "url": site.url, + } + ) + # for k, v in self.id_type_mapping.items(): + # if data.get(k): + # ids[v] = data.get(k) + d = ResourceContent( + metadata=data, + cover_image=raw_img, + cover_image_extention=img_ext, + lookup_ids=ids, + ) + return d diff --git a/catalog/sites/rss.py b/catalog/sites/rss.py index 7089a511..11dba3c3 100644 --- a/catalog/sites/rss.py +++ b/catalog/sites/rss.py @@ -33,7 +33,8 @@ class RSS(AbstractSite): def parse_feed_from_url(url): if not url: return None - feed = cache.get(url) + cache_key = f"rss:{url}" + feed = cache.get(cache_key) if feed: return feed if get_mock_mode(): @@ -50,7 +51,7 @@ def parse_feed_from_url(url): feed, open(settings.DOWNLOADER_SAVEDIR + "/" + get_mock_file(url), "wb"), ) - cache.set(url, feed, timeout=300) + cache.set(cache_key, feed, timeout=settings.DOWNLOADER_CACHE_TIMEOUT) return feed @classmethod diff --git a/catalog/templates/_item_card.html b/catalog/templates/_item_card.html index ade3588d..2e24c059 100644 --- a/catalog/templates/_item_card.html +++ b/catalog/templates/_item_card.html @@ -7,7 +7,7 @@
{% if not hide_category %}[{{ item.category.label }}]{% endif %} {% for res in item.external_resources.all %} - {{ res.site_name.label }} + {{ res.site_label }} {% endfor %} diff --git a/catalog/templates/_item_card_metadata_base.html b/catalog/templates/_item_card_metadata_base.html index 3cad9768..4915fdfd 100644 --- a/catalog/templates/_item_card_metadata_base.html +++ b/catalog/templates/_item_card_metadata_base.html @@ -15,7 +15,7 @@
{% if not hide_category %}[{{ item.category.label }}]{% endif %} {% for res in item.external_resources.all %} - {{ res.site_name.label }} + {{ res.site_label }} {% endfor %} diff --git a/catalog/templates/_item_comments.html b/catalog/templates/_item_comments.html index 61fd2b3d..2a620b58 100644 --- a/catalog/templates/_item_comments.html +++ b/catalog/templates/_item_comments.html @@ -53,7 +53,7 @@ + {% if comment.shared_link %} href="{{ comment.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> diff --git a/catalog/templates/_item_comments_by_episode.html b/catalog/templates/_item_comments_by_episode.html index 47bfd7c8..0487894e 100644 --- a/catalog/templates/_item_comments_by_episode.html +++ b/catalog/templates/_item_comments_by_episode.html @@ -58,7 +58,7 @@ + {% if comment.shared_link %} href="{{ comment.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> diff --git a/catalog/templates/_item_reviews.html b/catalog/templates/_item_reviews.html index c18590ce..5908c93d 100644 --- a/catalog/templates/_item_reviews.html +++ b/catalog/templates/_item_reviews.html @@ -18,7 +18,7 @@ + {% if review.shared_link %} href="{{ review.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> diff --git a/catalog/templates/_item_user_pieces.html b/catalog/templates/_item_user_pieces.html index 04994860..3e90664a 100644 --- a/catalog/templates/_item_user_pieces.html +++ b/catalog/templates/_item_user_pieces.html @@ -66,7 +66,7 @@
+ {% if mark.comment.shared_link %} href="{{ mark.comment.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {% comment %} {{ mark.comment.created_time|date }} {% endcomment %} @@ -89,7 +89,7 @@
+ {% if comment.shared_link %} href="{{ comment.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {% comment %} {{ comment.created_time|date }} {% endcomment %} @@ -127,7 +127,7 @@
+ {% if mark.review.shared_link %} href="{{ mark.review.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {{ mark.review.created_time|date }} diff --git a/catalog/templates/_sidebar_edit.html b/catalog/templates/_sidebar_edit.html index 1bd7a3f2..1a84f0bf 100644 --- a/catalog/templates/_sidebar_edit.html +++ b/catalog/templates/_sidebar_edit.html @@ -52,7 +52,7 @@
编辑选项
{% for res in item.external_resources.all %}
- {% trans '源网站' %}: {{ res.site_name.label }} + {% trans '源网站' %}: {{ res.site_label }}
{% for res in item.external_resources.all %} - {{ res.site_name.label }} + {{ res.site_label }} {% endfor %} diff --git a/catalog/templates/item_mark_list.html b/catalog/templates/item_mark_list.html index e4da00f3..ed0c3505 100644 --- a/catalog/templates/item_mark_list.html +++ b/catalog/templates/item_mark_list.html @@ -43,7 +43,7 @@
+ {% if mark.shared_link %} href="{{ mark.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {{ mark.created_time|date }} diff --git a/catalog/templates/item_review_list.html b/catalog/templates/item_review_list.html index aa4d3c2b..90e3339e 100644 --- a/catalog/templates/item_review_list.html +++ b/catalog/templates/item_review_list.html @@ -31,7 +31,7 @@
+ {% if review.shared_link %} href="{{ review.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {% liked_piece review as liked %} diff --git a/catalog/urls.py b/catalog/urls.py index 94125c23..3e41ccf6 100644 --- a/catalog/urls.py +++ b/catalog/urls.py @@ -129,8 +129,9 @@ def _get_all_url_paths(): mark_list, name="mark_list", ), - path("search/", search, name="search"), - path("search/external/", external_search, name="external_search"), + path("search", search, name="search"), + path("search/", search, name="search_legacy"), + path("search/external", external_search, name="external_search"), path("fetch_refresh/", fetch_refresh, name="fetch_refresh"), path("refetch", refetch, name="refetch"), path("unlink", unlink, name="unlink"), diff --git a/catalog/views.py b/catalog/views.py index 11ac0b3d..a9635c28 100644 --- a/catalog/views.py +++ b/catalog/views.py @@ -19,9 +19,9 @@ ShelfMember, ShelfType, ShelfTypeNames, - query_following, - query_item_category, - query_visible, + q_item_in_category, + q_piece_in_home_feed_of_user, + q_piece_visible_to_user, ) from .forms import * @@ -74,6 +74,8 @@ def retrieve(request, item_path, item_uuid): item_url = f"/{item_path}/{item_uuid}" if item.url != item_url: return redirect(item.url) + if request.headers.get("Accept", "").endswith("json"): + return redirect(item.api_url) skipcheck = request.GET.get("skipcheck", False) and request.user.is_authenticated if not skipcheck and item.merged_to_item: return redirect(item.merged_to_item.url) @@ -91,16 +93,16 @@ def retrieve(request, item_path, item_uuid): child_item_comments = [] shelf_types = [(n[1], n[2]) for n in iter(ShelfTypeNames) if n[0] == item.category] if request.user.is_authenticated: - visible = query_visible(request.user) - mark = Mark(request.user, item) + visible = q_piece_visible_to_user(request.user) + mark = Mark(request.user.identity, item) child_item_comments = Comment.objects.filter( - owner=request.user, item__in=item.child_items.all() + 
owner=request.user.identity, item__in=item.child_items.all() ) review = mark.review - my_collections = item.collections.all().filter(owner=request.user) + my_collections = item.collections.all().filter(owner=request.user.identity) collection_list = ( item.collections.all() - .exclude(owner=request.user) + .exclude(owner=request.user.identity) .filter(visible) .annotate(like_counts=Count("likes")) .order_by("-like_counts") @@ -145,9 +147,9 @@ def mark_list(request, item_path, item_uuid, following_only=False): raise Http404() queryset = ShelfMember.objects.filter(item=item).order_by("-created_time") if following_only: - queryset = queryset.filter(query_following(request.user)) + queryset = queryset.filter(q_piece_in_home_feed_of_user(request.user)) else: - queryset = queryset.filter(query_visible(request.user)) + queryset = queryset.filter(q_piece_visible_to_user(request.user)) paginator = Paginator(queryset, NUM_REVIEWS_ON_LIST_PAGE) page_number = request.GET.get("page", default=1) marks = paginator.get_page(page_number) @@ -169,7 +171,7 @@ def review_list(request, item_path, item_uuid): if not item: raise Http404() queryset = Review.objects.filter(item=item).order_by("-created_time") - queryset = queryset.filter(query_visible(request.user)) + queryset = queryset.filter(q_piece_visible_to_user(request.user)) paginator = Paginator(queryset, NUM_REVIEWS_ON_LIST_PAGE) page_number = request.GET.get("page", default=1) reviews = paginator.get_page(page_number) @@ -192,7 +194,7 @@ def comments(request, item_path, item_uuid): raise Http404() ids = item.child_item_ids + [item.id] queryset = Comment.objects.filter(item_id__in=ids).order_by("-created_time") - queryset = queryset.filter(query_visible(request.user)) + queryset = queryset.filter(q_piece_visible_to_user(request.user)) before_time = request.GET.get("last") if before_time: queryset = queryset.filter(created_time__lte=before_time) @@ -218,7 +220,7 @@ def comments_by_episode(request, item_path, item_uuid): else: ids = 
item.child_item_ids queryset = Comment.objects.filter(item_id__in=ids).order_by("-created_time") - queryset = queryset.filter(query_visible(request.user)) + queryset = queryset.filter(q_piece_visible_to_user(request.user)) before_time = request.GET.get("last") if before_time: queryset = queryset.filter(created_time__lte=before_time) @@ -240,7 +242,7 @@ def reviews(request, item_path, item_uuid): raise Http404() ids = item.child_item_ids + [item.id] queryset = Review.objects.filter(item_id__in=ids).order_by("-created_time") - queryset = queryset.filter(query_visible(request.user)) + queryset = queryset.filter(q_piece_visible_to_user(request.user)) before_time = request.GET.get("last") if before_time: queryset = queryset.filter(created_time__lte=before_time) diff --git a/common/static/scss/_sitelabel.scss b/common/static/scss/_sitelabel.scss index de8c0b2e..021d1493 100644 --- a/common/static/scss/_sitelabel.scss +++ b/common/static/scss/_sitelabel.scss @@ -71,6 +71,12 @@ font-weight: lighter; } + .fedi { + background: var(--pico-primary); + color: white; + font-weight: lighter; + } + .tmdb { background: linear-gradient(90deg, #91CCA3, #1FB4E2); color: white; diff --git a/common/templates/_sidebar.html b/common/templates/_sidebar.html index 54fb321e..1783ceaa 100644 --- a/common/templates/_sidebar.html +++ b/common/templates/_sidebar.html @@ -51,7 +51,7 @@
{{ user.display_name }}
target="_blank" rel="noopener" onclick="window.open(this.href); return false;"> - @{{ user.handler }} + {{ user.handler }} diff --git a/common/templatetags/mastodon.py b/common/templatetags/mastodon.py index 8d31bf40..c06cb1b0 100644 --- a/common/templatetags/mastodon.py +++ b/common/templatetags/mastodon.py @@ -3,6 +3,8 @@ from django.template.defaultfilters import stringfilter from django.utils.translation import gettext_lazy as _ +from users.models import APIdentity, User + register = template.Library() @@ -13,9 +15,10 @@ def mastodon(domain): @register.simple_tag(takes_context=True) -def current_user_relationship(context, user): +def current_user_relationship(context, user: "User"): current_user = context["request"].user r = { + "requesting": False, "following": False, "unfollowable": False, "muting": False, @@ -24,21 +27,23 @@ def current_user_relationship(context, user): "status": "", } if current_user and current_user.is_authenticated and current_user != user: - if current_user.is_blocking(user) or user.is_blocking(current_user): + current_identity = context["request"].user.identity + target_identity = user.identity + if current_identity.is_blocking( + target_identity + ) or current_identity.is_blocked_by(target_identity): r["rejecting"] = True else: - r["muting"] = current_user.is_muting(user) - if user in current_user.local_muting.all(): - r["unmutable"] = current_user - if current_user.is_following(user): - r["following"] = True - if user in current_user.local_following.all(): - r["unfollowable"] = True - if current_user.is_followed_by(user): + r["muting"] = current_identity.is_muting(target_identity) + r["unmutable"] = r["muting"] + r["following"] = current_identity.is_following(target_identity) + r["unfollowable"] = r["following"] + if r["following"]: + if current_identity.is_followed_by(target_identity): r["status"] = _("互相关注") else: r["status"] = _("已关注") else: - if current_user.is_followed_by(user): + if 
current_identity.is_followed_by(target_identity): r["status"] = _("被ta关注") return r diff --git a/common/urls.py b/common/urls.py index 679dc795..e8bc0a15 100644 --- a/common/urls.py +++ b/common/urls.py @@ -1,4 +1,4 @@ -from django.urls import path +from django.urls import path, re_path from .views import * @@ -7,4 +7,5 @@ path("", home), path("home/", home, name="home"), path("me/", me, name="me"), + re_path("^~neodb~(?P.+)", ap_redirect), ] diff --git a/common/utils.py b/common/utils.py index fe43222b..80408737 100644 --- a/common/utils.py +++ b/common/utils.py @@ -1,9 +1,22 @@ import uuid +from typing import TYPE_CHECKING -from django.http import Http404 +from django.http import Http404, HttpRequest from django.utils import timezone from django.utils.baseconv import base62 +if TYPE_CHECKING: + from users.models import APIdentity, User + + +class AuthedHttpRequest(HttpRequest): + """ + A subclass of HttpRequest for type-checking only + """ + + user: "User" + target_identity: "APIdentity" + class PageLinksGenerator: # TODO inherit django paginator diff --git a/common/views.py b/common/views.py index 3b26f24b..ce10d644 100644 --- a/common/views.py +++ b/common/views.py @@ -6,7 +6,7 @@ @login_required def me(request): - return redirect(request.user.url) + return redirect(request.user.identity.url) def home(request): @@ -22,6 +22,10 @@ def home(request): return redirect(reverse("catalog:discover")) +def ap_redirect(request, uri): + return redirect(uri) + + def error_400(request, exception=None): return render( request, diff --git a/doc/install.md b/doc/install.md index 87d8bc59..336b6677 100644 --- a/doc/install.md +++ b/doc/install.md @@ -33,8 +33,8 @@ Install PostgreSQL, Redis and Python (3.10 or above) if not yet ### 1.1 Database Setup database ``` -CREATE DATABASE neodb ENCODING 'UTF8' LC_COLLATE='en_US.UTF-8' LC_CTYPE='en_US.UTF-8' TEMPLATE template0; CREATE ROLE neodb with LOGIN ENCRYPTED PASSWORD 'abadface'; +CREATE DATABASE neodb ENCODING 'UTF8' 
LC_COLLATE='en_US.UTF-8' LC_CTYPE='en_US.UTF-8' TEMPLATE template0; GRANT ALL ON DATABASE neodb TO neodb; ``` diff --git a/journal/api.py b/journal/api.py index 6435d345..e23cc870 100644 --- a/journal/api.py +++ b/journal/api.py @@ -10,8 +10,9 @@ from catalog.common.models import * from common.api import * +from mastodon.api import share_review -from .models import * +from .models import Mark, Review, ShelfType, TagManager, q_item_in_category class MarkSchema(Schema): @@ -84,9 +85,9 @@ def mark_item(request, item_uuid: str, mark: MarkInSchema): item = Item.get_by_url(item_uuid) if not item: return 404, {"message": "Item not found"} - m = Mark(request.user, item) + m = Mark(request.user.identity, item) try: - TagManager.tag_item_by_user(item, request.user, mark.tags, mark.visibility) + TagManager.tag_item(item, request.user, mark.tags, mark.visibility) m.update( mark.shelf_type, mark.comment_text, @@ -114,7 +115,7 @@ def delete_mark(request, item_uuid: str): m = Mark(request.user, item) m.delete() # skip tag deletion for now to be consistent with web behavior - # TagManager.tag_item_by_user(item, request.user, [], 0) + # TagManager.tag_item(item, request.user, [], 0) return 200, {"message": "OK"} @@ -144,9 +145,9 @@ def list_reviews(request, category: AvailableItemCategory | None = None): `category` is optional, reviews for all categories will be returned if not specified. 
""" - queryset = Review.objects.filter(owner=request.user) + queryset = Review.objects.filter(owner=request.user.identity) if category: - queryset = queryset.filter(query_item_category(category)) + queryset = queryset.filter(q_item_in_category(category)) return queryset.prefetch_related("item") @@ -161,7 +162,7 @@ def get_review_by_item(request, item_uuid: str): item = Item.get_by_url(item_uuid) if not item: return 404, {"message": "Item not found"} - review = Review.objects.filter(owner=request.user, item=item).first() + review = Review.objects.filter(owner=request.user.identity, item=item).first() if not review: return 404, {"message": "Review not found"} return review @@ -182,15 +183,17 @@ def review_item(request, item_uuid: str, review: ReviewInSchema): item = Item.get_by_url(item_uuid) if not item: return 404, {"message": "Item not found"} - Review.review_item_by_user( + Review.update_item_review( item, request.user, review.title, review.body, review.visibility, created_time=review.created_time, - share_to_mastodon=review.post_to_fediverse, ) + if review.post_to_fediverse and request.user.mastodon_username: + share_review(review) + return 200, {"message": "OK"} @@ -205,7 +208,7 @@ def delete_review(request, item_uuid: str): item = Item.get_by_url(item_uuid) if not item: return 404, {"message": "Item not found"} - Review.review_item_by_user(item, request.user, None, None) + Review.update_item_review(item, request.user, None, None) return 200, {"message": "OK"} diff --git a/journal/exporters/doufen.py b/journal/exporters/doufen.py index f37311a9..d859a611 100644 --- a/journal/exporters/doufen.py +++ b/journal/exporters/doufen.py @@ -47,9 +47,7 @@ def export_marks_task(user): ]: ws = wb.create_sheet(title=label) shelf = user.shelf_manager.get_shelf(status) - q = query_item_category(ItemCategory.Movie) | query_item_category( - ItemCategory.TV - ) + q = q_item_in_category(ItemCategory.Movie) | q_item_in_category(ItemCategory.TV) marks = 
shelf.members.all().filter(q).order_by("created_time") ws.append(heading) for mm in marks: @@ -95,7 +93,7 @@ def export_marks_task(user): ]: ws = wb.create_sheet(title=label) shelf = user.shelf_manager.get_shelf(status) - q = query_item_category(ItemCategory.Music) + q = q_item_in_category(ItemCategory.Music) marks = shelf.members.all().filter(q).order_by("created_time") ws.append(heading) for mm in marks: @@ -135,7 +133,7 @@ def export_marks_task(user): ]: ws = wb.create_sheet(title=label) shelf = user.shelf_manager.get_shelf(status) - q = query_item_category(ItemCategory.Book) + q = q_item_in_category(ItemCategory.Book) marks = shelf.members.all().filter(q).order_by("created_time") ws.append(heading) for mm in marks: @@ -177,7 +175,7 @@ def export_marks_task(user): ]: ws = wb.create_sheet(title=label) shelf = user.shelf_manager.get_shelf(status) - q = query_item_category(ItemCategory.Game) + q = q_item_in_category(ItemCategory.Game) marks = shelf.members.all().filter(q).order_by("created_time") ws.append(heading) for mm in marks: @@ -219,7 +217,7 @@ def export_marks_task(user): ]: ws = wb.create_sheet(title=label) shelf = user.shelf_manager.get_shelf(status) - q = query_item_category(ItemCategory.Podcast) + q = q_item_in_category(ItemCategory.Podcast) marks = shelf.members.all().filter(q).order_by("created_time") ws.append(heading) for mm in marks: @@ -267,7 +265,7 @@ def export_marks_task(user): (ItemCategory.Podcast, "播客评论"), ]: ws = wb.create_sheet(title=label) - q = query_item_category(category) + q = q_item_in_category(category) reviews = Review.objects.filter(owner=user).filter(q).order_by("created_time") ws.append(review_heading) for review in reviews: diff --git a/journal/importers/douban.py b/journal/importers/douban.py index ee649ff8..6232d860 100644 --- a/journal/importers/douban.py +++ b/journal/importers/douban.py @@ -261,7 +261,7 @@ def import_mark(self, url, shelf_type, comment, rating_grade, tags, time): ) print("+", end="", flush=True) if tags: - 
TagManager.tag_item_by_user(item, self.user, tags) + TagManager.tag_item(item, self.user, tags) return 1 def import_review_sheet(self, worksheet, sheet_name): diff --git a/journal/management/commands/journal.py b/journal/management/commands/journal.py index 3fadaf91..fd8645a0 100644 --- a/journal/management/commands/journal.py +++ b/journal/management/commands/journal.py @@ -1,9 +1,10 @@ -import pprint - from django.core.management.base import BaseCommand +from catalog.models import Item from journal.importers.douban import DoubanImporter from journal.models import * +from journal.models.common import Content +from journal.models.itemlist import ListMember from users.models import User diff --git a/journal/migrations/0014_alter_piece_options_piece_local_piece_post_id_and_more.py b/journal/migrations/0014_alter_piece_options_piece_local_piece_post_id_and_more.py new file mode 100644 index 00000000..c6f07aba --- /dev/null +++ b/journal/migrations/0014_alter_piece_options_piece_local_piece_post_id_and_more.py @@ -0,0 +1,50 @@ +# Generated by Django 4.2.3 on 2023-08-06 02:01 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ( + "journal", + "0014_remove_reply_piece_ptr_remove_reply_reply_to_content_and_more", + ), + ] + + operations = [ + migrations.AlterModelOptions( + name="piece", + options={}, + ), + migrations.AddField( + model_name="piece", + name="local", + field=models.BooleanField(default=True), + ), + migrations.AddField( + model_name="piece", + name="post_id", + field=models.BigIntegerField(default=None, null=True), + ), + migrations.AddField( + model_name="comment", + name="remote_id", + field=models.CharField(default=None, max_length=200, null=True), + ), + migrations.AddField( + model_name="rating", + name="remote_id", + field=models.CharField(default=None, max_length=200, null=True), + ), + migrations.AddField( + model_name="review", + name="remote_id", + field=models.CharField(default=None, 
max_length=200, null=True), + ), + migrations.AddIndex( + model_name="piece", + index=models.Index( + fields=["post_id"], name="journal_pie_post_id_6a74ff_idx" + ), + ), + ] diff --git a/journal/migrations/0015_alter_collection_featured_by_users_and_more.py b/journal/migrations/0015_alter_collection_featured_by_users_and_more.py new file mode 100644 index 00000000..ad0a7d72 --- /dev/null +++ b/journal/migrations/0015_alter_collection_featured_by_users_and_more.py @@ -0,0 +1,111 @@ +# Generated by Django 4.2.4 on 2023-08-09 13:26 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("users", "0012_apidentity"), + ("journal", "0014_alter_piece_options_piece_local_piece_post_id_and_more"), + ] + + operations = [ + migrations.RemoveField( + model_name="collection", + name="featured_by_users", + ), + migrations.AddField( + model_name="collection", + name="featured_by", + field=models.ManyToManyField( + related_name="featured_collections", + through="journal.FeaturedCollection", + to="users.apidentity", + ), + ), + migrations.AlterField( + model_name="collection", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="collectionmember", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="comment", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="featuredcollection", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="like", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + 
migrations.AlterField( + model_name="rating", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="review", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="shelf", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="shelflogentry", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="shelfmember", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="tag", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="tagmember", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + ] diff --git a/journal/models/__init__.py b/journal/models/__init__.py index d1609f0b..3e9ab152 100644 --- a/journal/models/__init__.py +++ b/journal/models/__init__.py @@ -4,11 +4,11 @@ Piece, UserOwnedObjectMixin, VisibilityType, - max_visiblity_to, - q_visible_to, - query_following, - query_item_category, - query_visible, + max_visiblity_to_user, + q_item_in_category, + q_owned_piece_visible_to_user, + q_piece_in_home_feed_of_user, + q_piece_visible_to_user, ) from .like import Like from .mark import Mark diff --git a/journal/models/collection.py b/journal/models/collection.py index 430ea9c5..f204bc56 100644 --- a/journal/models/collection.py +++ b/journal/models/collection.py @@ -1,14 +1,14 @@ import re from functools import cached_property -from django.db import connection, models 
+from django.db import models from django.utils.translation import gettext_lazy as _ from catalog.collection.models import Collection as CatalogCollection from catalog.common import jsondata from catalog.common.utils import DEFAULT_ITEM_COVER, piece_cover_path from catalog.models import Item -from users.models import User +from users.models import APIdentity from .common import Piece from .itemlist import List, ListMember @@ -42,8 +42,8 @@ class Collection(List): collaborative = models.PositiveSmallIntegerField( default=0 ) # 0: Editable by owner only / 1: Editable by bi-direction followers - featured_by_users = models.ManyToManyField( - to=User, related_name="featured_collections", through="FeaturedCollection" + featured_by = models.ManyToManyField( + to=APIdentity, related_name="featured_collections", through="FeaturedCollection" ) @property @@ -56,25 +56,25 @@ def plain_description(self): html = render_md(self.brief) return _RE_HTML_TAG.sub(" ", html) - def featured_by_user_since(self, user): - f = FeaturedCollection.objects.filter(target=self, owner=user).first() + def featured_since(self, owner: APIdentity): + f = FeaturedCollection.objects.filter(target=self, owner=owner).first() return f.created_time if f else None - def get_stats_for_user(self, user): + def get_stats(self, owner: APIdentity): items = list(self.members.all().values_list("item_id", flat=True)) stats = {"total": len(items)} - for st, shelf in user.shelf_manager.shelf_list.items(): + for st, shelf in owner.shelf_manager.shelf_list.items(): stats[st] = shelf.members.all().filter(item_id__in=items).count() stats["percentage"] = ( round(stats["complete"] * 100 / stats["total"]) if stats["total"] else 0 ) return stats - def get_progress_for_user(self, user): + def get_progress(self, owner: APIdentity): items = list(self.members.all().values_list("item_id", flat=True)) if len(items) == 0: return 0 - shelf = user.shelf_manager.shelf_list["complete"] + shelf = 
owner.shelf_manager.shelf_list["complete"] return round( shelf.members.all().filter(item_id__in=items).count() * 100 / len(items) ) @@ -94,7 +94,7 @@ def save(self, *args, **kwargs): class FeaturedCollection(Piece): - owner = models.ForeignKey(User, on_delete=models.CASCADE) + owner = models.ForeignKey(APIdentity, on_delete=models.CASCADE) target = models.ForeignKey(Collection, on_delete=models.CASCADE) created_time = models.DateTimeField(auto_now_add=True) edited_time = models.DateTimeField(auto_now=True) @@ -108,4 +108,4 @@ def visibility(self): @cached_property def progress(self): - return self.target.get_progress_for_user(self.owner) + return self.target.get_progress(self.owner) diff --git a/journal/models/comment.py b/journal/models/comment.py index 05c59e1d..dc566dee 100644 --- a/journal/models/comment.py +++ b/journal/models/comment.py @@ -1,10 +1,11 @@ +from datetime import datetime from functools import cached_property from django.db import models from django.utils import timezone from catalog.models import Item -from users.models import User +from users.models import APIdentity from .common import Content from .rating import Rating @@ -14,13 +15,44 @@ class Comment(Content): text = models.TextField(blank=False, null=False) + @property + def ap_object(self): + return { + "id": self.absolute_url, + "type": "Comment", + "content": self.text, + "published": self.created_time.isoformat(), + "updated": self.edited_time.isoformat(), + "attributedTo": self.owner.actor_uri, + "relatedWith": self.item.absolute_url, + "url": self.absolute_url, + } + + @classmethod + def update_by_ap_object(cls, owner, item, obj, post_id, visibility): + content = obj.get("content", "").strip() if obj else "" + if not content: + cls.objects.filter(owner=owner, item=item).delete() + return + d = { + "text": content, + "local": False, + "remote_id": obj["id"], + "post_id": post_id, + "visibility": visibility, + "created_time": datetime.fromisoformat(obj["published"]), + "edited_time": 
datetime.fromisoformat(obj["updated"]), + } + p, _ = cls.objects.update_or_create(owner=owner, item=item, defaults=d) + return p + @property def html(self): return render_text(self.text) @cached_property def rating_grade(self): - return Rating.get_item_rating_by_user(self.item, self.owner) + return Rating.get_item_rating(self.item, self.owner) @cached_property def mark(self): @@ -38,17 +70,17 @@ def item_url(self): return self.item.url @staticmethod - def comment_item_by_user( - item: Item, user: User, text: str | None, visibility=0, created_time=None + def comment_item( + item: Item, owner: APIdentity, text: str | None, visibility=0, created_time=None ): - comment = Comment.objects.filter(owner=user, item=item).first() + comment = Comment.objects.filter(owner=owner, item=item).first() if not text: if comment is not None: comment.delete() comment = None elif comment is None: comment = Comment.objects.create( - owner=user, + owner=owner, item=item, text=text, visibility=visibility, diff --git a/journal/models/common.py b/journal/models/common.py index a8861e86..4b5b35ff 100644 --- a/journal/models/common.py +++ b/journal/models/common.py @@ -1,30 +1,20 @@ import re import uuid -from functools import cached_property -import django.dispatch from django.conf import settings -from django.contrib.contenttypes.models import ContentType -from django.core.exceptions import PermissionDenied -from django.core.validators import MaxValueValidator, MinValueValidator, RegexValidator from django.db import connection, models from django.db.models import Avg, Count, Q from django.utils import timezone from django.utils.baseconv import base62 from django.utils.translation import gettext_lazy as _ -from markdownx.models import MarkdownxField from polymorphic.models import PolymorphicModel -from catalog.collection.models import Collection as CatalogCollection -from catalog.common import jsondata -from catalog.common.models import Item, ItemCategory -from catalog.common.utils import 
DEFAULT_ITEM_COVER, piece_cover_path +from catalog.common.models import AvailableItemCategory, Item, ItemCategory from catalog.models import * -from mastodon.api import share_review -from users.models import User +from takahe.utils import Takahe +from users.models import APIdentity, User from .mixins import UserOwnedObjectMixin -from .renderers import render_md, render_text _logger = logging.getLogger(__name__) @@ -35,46 +25,57 @@ class VisibilityType(models.IntegerChoices): Private = 2, _("仅自己") -def q_visible_to(viewer, owner): +def q_owned_piece_visible_to_user(viewing_user: User, owner: APIdentity): + if ( + not viewing_user + or not viewing_user.is_authenticated + or not viewing_user.identity + ): + return Q(visibility=0) + viewer = viewing_user.identity if viewer == owner: return Q() # elif viewer.is_blocked_by(owner): # return Q(pk__in=[]) - elif viewer.is_authenticated and viewer.is_following(owner): - return Q(visibility__in=[0, 1]) + elif viewer.is_following(owner): + return Q(owner=owner, visibility__in=[0, 1]) else: - return Q(visibility=0) + return Q(owner=owner, visibility=0) -def max_visiblity_to(viewer, owner): +def max_visiblity_to_user(viewing_user: User, owner: APIdentity): + if ( + not viewing_user + or not viewing_user.is_authenticated + or not viewing_user.identity + ): + return 0 + viewer = viewing_user.identity if viewer == owner: return 2 - # elif viewer.is_blocked_by(owner): - # return Q(pk__in=[]) - elif viewer.is_authenticated and viewer.is_following(owner): + elif viewer.is_following(owner): return 1 else: return 0 -def query_visible(user): +def q_piece_visible_to_user(user: User): + if not user or not user.is_authenticated or not user.identity: + return Q(visibility=0) return ( - ( - Q(visibility=0) - | Q(owner_id__in=user.following, visibility=1) - | Q(owner_id=user.id) - ) - & ~Q(owner_id__in=user.ignoring) - if user.is_authenticated - else Q(visibility=0) - ) + Q(visibility=0) + | Q(owner_id__in=user.identity.following, 
visibility=1) + | Q(owner_id=user.identity.pk) + ) & ~Q(owner_id__in=user.identity.ignoring) -def query_following(user): - return Q(owner_id__in=user.following, visibility__lt=2) | Q(owner_id=user.id) +def q_piece_in_home_feed_of_user(user: User): + return Q(owner_id__in=user.identity.following, visibility__lt=2) | Q( + owner_id=user.identity.pk + ) -def query_item_category(item_category): +def q_item_in_category(item_category: ItemCategory | AvailableItemCategory): classes = item_categories()[item_category] # q = Q(item__instance_of=classes[0]) # for cls in classes[1:]: @@ -92,7 +93,7 @@ def query_item_category(item_category): # class ImportSession(models.Model): -# owner = models.ForeignKey(User, on_delete=models.CASCADE) +# owner = models.ForeignKey(APIdentity, on_delete=models.CASCADE) # status = models.PositiveSmallIntegerField(default=ImportStatus.QUEUED) # importer = models.CharField(max_length=50) # file = models.CharField() @@ -115,6 +116,13 @@ def query_item_category(item_category): class Piece(PolymorphicModel, UserOwnedObjectMixin): url_path = "p" # subclass must specify this uid = models.UUIDField(default=uuid.uuid4, editable=False, db_index=True) + local = models.BooleanField(default=True) + post_id = models.BigIntegerField(null=True, default=None) + + class Meta: + indexes = [ + models.Index(fields=["post_id"]), + ] @property def uuid(self): @@ -132,9 +140,18 @@ def absolute_url(self): def api_url(self): return f"/api/{self.url}" if self.url_path else None + @property + def shared_link(self): + return Takahe.get_post_url(self.post_id) if self.post_id else None + @property def like_count(self): - return self.likes.all().count() + return ( + Takahe.get_post_stats(self.post_id).get("likes", 0) if self.post_id else 0 + ) + + def is_liked_by(self, user): + return self.post_id and Takahe.post_liked_by(self.post_id, user) @classmethod def get_by_url(cls, url_or_b62): @@ -149,9 +166,17 @@ def get_by_url(cls, url_or_b62): obj = None return obj + @classmethod 
+ def update_by_ap_object(cls, owner, item, obj, post_id, visibility): + raise NotImplemented + + @property + def ap_object(self): + raise NotImplemented + class Content(Piece): - owner = models.ForeignKey(User, on_delete=models.PROTECT) + owner = models.ForeignKey(APIdentity, on_delete=models.PROTECT) visibility = models.PositiveSmallIntegerField( default=0 ) # 0: Public / 1: Follower only / 2: Self only @@ -161,6 +186,7 @@ class Content(Piece): ) # auto_now=True FIXME revert this after migration metadata = models.JSONField(default=dict) item = models.ForeignKey(Item, on_delete=models.PROTECT) + remote_id = models.CharField(max_length=200, null=True, default=None) def __str__(self): return f"{self.uuid}@{self.item}" diff --git a/journal/models/itemlist.py b/journal/models/itemlist.py index a5b5b543..f70a0497 100644 --- a/journal/models/itemlist.py +++ b/journal/models/itemlist.py @@ -5,7 +5,7 @@ from django.utils import timezone from catalog.models import Item, ItemCategory -from users.models import User +from users.models import APIdentity from .common import Piece @@ -15,24 +15,21 @@ class List(Piece): """ - List (abstract class) + List (abstract model) """ - owner = models.ForeignKey(User, on_delete=models.PROTECT) + owner = models.ForeignKey(APIdentity, on_delete=models.PROTECT) visibility = models.PositiveSmallIntegerField( default=0 ) # 0: Public / 1: Follower only / 2: Self only - created_time = models.DateTimeField( - default=timezone.now - ) # auto_now_add=True FIXME revert this after migration - edited_time = models.DateTimeField( - default=timezone.now - ) # auto_now=True FIXME revert this after migration + created_time = models.DateTimeField(default=timezone.now) + edited_time = models.DateTimeField(default=timezone.now) metadata = models.JSONField(default=dict) class Meta: abstract = True + MEMBER_CLASS: Piece # MEMBER_CLASS = None # subclass must override this # subclass must add this: # items = models.ManyToManyField(Item, through='ListMember') @@ 
-146,14 +143,12 @@ class ListMember(Piece): parent = models.ForeignKey('List', related_name='members', on_delete=models.CASCADE) """ - owner = models.ForeignKey(User, on_delete=models.PROTECT) + owner = models.ForeignKey(APIdentity, on_delete=models.PROTECT) visibility = models.PositiveSmallIntegerField( default=0 ) # 0: Public / 1: Follower only / 2: Self only created_time = models.DateTimeField(default=timezone.now) - edited_time = models.DateTimeField( - default=timezone.now - ) # auto_now=True FIXME revert this after migration + edited_time = models.DateTimeField(default=timezone.now) metadata = models.JSONField(default=dict) item = models.ForeignKey(Item, on_delete=models.PROTECT) position = models.PositiveIntegerField() diff --git a/journal/models/like.py b/journal/models/like.py index e0150915..9a06e433 100644 --- a/journal/models/like.py +++ b/journal/models/like.py @@ -3,13 +3,13 @@ from django.utils import timezone from django.utils.translation import gettext_lazy as _ -from users.models import User +from users.models import APIdentity from .common import Piece -class Like(Piece): - owner = models.ForeignKey(User, on_delete=models.PROTECT) +class Like(Piece): # TODO remove + owner = models.ForeignKey(APIdentity, on_delete=models.PROTECT) visibility = models.PositiveSmallIntegerField( default=0 ) # 0: Public / 1: Follower only / 2: Self only @@ -18,25 +18,27 @@ class Like(Piece): target = models.ForeignKey(Piece, on_delete=models.CASCADE, related_name="likes") @staticmethod - def user_liked_piece(user, piece): - return Like.objects.filter(owner=user, target=piece).exists() + def user_liked_piece(owner, piece): + return Like.objects.filter(owner=owner.identity, target=piece).exists() @staticmethod - def user_like_piece(user, piece): + def user_like_piece(owner, piece): if not piece: return - like = Like.objects.filter(owner=user, target=piece).first() + like = Like.objects.filter(owner=owner.identity, target=piece).first() if not like: - like = 
Like.objects.create(owner=user, target=piece) + like = Like.objects.create(owner=owner.identity, target=piece) return like @staticmethod - def user_unlike_piece(user, piece): + def user_unlike_piece(owner, piece): if not piece: return - Like.objects.filter(owner=user, target=piece).delete() + Like.objects.filter(owner=owner.identity, target=piece).delete() @staticmethod - def user_likes_by_class(user, cls): + def user_likes_by_class(owner, cls): ctype_id = ContentType.objects.get_for_model(cls) - return Like.objects.filter(owner=user, target__polymorphic_ctype=ctype_id) + return Like.objects.filter( + owner=owner.identity, target__polymorphic_ctype=ctype_id + ) diff --git a/journal/models/mark.py b/journal/models/mark.py index ac5bbbf1..1961366c 100644 --- a/journal/models/mark.py +++ b/journal/models/mark.py @@ -12,6 +12,7 @@ from django.utils import timezone from django.utils.baseconv import base62 from django.utils.translation import gettext_lazy as _ +from loguru import logger from markdownx.models import MarkdownxField from polymorphic.models import PolymorphicModel @@ -20,16 +21,14 @@ from catalog.common.models import Item, ItemCategory from catalog.common.utils import DEFAULT_ITEM_COVER, piece_cover_path from catalog.models import * -from mastodon.api import share_review -from users.models import User +from takahe.utils import Takahe +from users.models import APIdentity from .comment import Comment from .rating import Rating from .review import Review from .shelf import Shelf, ShelfLogEntry, ShelfManager, ShelfMember, ShelfType -_logger = logging.getLogger(__name__) - class Mark: """ @@ -38,8 +37,8 @@ class Mark: it mimics previous mark behaviour. 
""" - def __init__(self, user, item): - self.owner = user + def __init__(self, owner: APIdentity, item: Item): + self.owner = owner self.item = item @cached_property @@ -60,7 +59,7 @@ def shelf_type(self) -> ShelfType | None: @property def action_label(self) -> str: - if self.shelfmember: + if self.shelfmember and self.shelf_type: return ShelfManager.get_action_label(self.shelf_type, self.item.category) if self.comment: return ShelfManager.get_action_label( @@ -72,7 +71,7 @@ def action_label(self) -> str: def shelf_label(self) -> str | None: return ( ShelfManager.get_label(self.shelf_type, self.item.category) - if self.shelfmember + if self.shelf_type else None ) @@ -86,19 +85,23 @@ def metadata(self) -> dict | None: @property def visibility(self) -> int: - return ( - self.shelfmember.visibility - if self.shelfmember - else self.owner.preference.default_visibility - ) + if self.shelfmember: + return self.shelfmember.visibility + else: + logger.warning(f"no shelfmember for mark {self.owner}, {self.item}") + return 2 @cached_property def tags(self) -> list[str]: return self.owner.tag_manager.get_item_tags(self.item) + @cached_property + def rating(self): + return Rating.objects.filter(owner=self.owner, item=self.item).first() + @cached_property def rating_grade(self) -> int | None: - return Rating.get_item_rating_by_user(self.item, self.owner) + return Rating.get_item_rating(self.item, self.owner) @cached_property def comment(self) -> Comment | None: @@ -118,29 +121,24 @@ def review(self) -> Review | None: def update( self, - shelf_type: ShelfType | None, - comment_text: str | None, - rating_grade: int | None, - visibility: int, + shelf_type, + comment_text, + rating_grade, + visibility, metadata=None, created_time=None, share_to_mastodon=False, - silence=False, ): - # silence=False means update is logged. 
- share = ( - share_to_mastodon - and self.owner.mastodon_username - and shelf_type is not None - and ( - shelf_type != self.shelf_type - or comment_text != self.comment_text - or rating_grade != self.rating_grade - ) + post_to_feed = shelf_type is not None and ( + shelf_type != self.shelf_type + or comment_text != self.comment_text + or rating_grade != self.rating_grade ) + if shelf_type is None: + Takahe.delete_mark(self) if created_time and created_time >= timezone.now(): created_time = None - share_as_new_post = shelf_type != self.shelf_type + post_as_new = shelf_type != self.shelf_type original_visibility = self.visibility if shelf_type != self.shelf_type or visibility != original_visibility: self.shelfmember = self.owner.shelf_manager.move_item( @@ -148,9 +146,8 @@ def update( shelf_type, visibility=visibility, metadata=metadata, - silence=silence, ) - if not silence and self.shelfmember and created_time: + if self.shelfmember and created_time: # if it's an update(not delete) and created_time is specified, # update the timestamp of the shelfmember and log log = ShelfLogEntry.objects.filter( @@ -172,7 +169,7 @@ def update( timestamp=created_time, ) if comment_text != self.comment_text or visibility != original_visibility: - self.comment = Comment.comment_item_by_user( + self.comment = Comment.comment_item( self.item, self.owner, comment_text, @@ -180,35 +177,15 @@ def update( self.shelfmember.created_time if self.shelfmember else None, ) if rating_grade != self.rating_grade or visibility != original_visibility: - Rating.rate_item_by_user(self.item, self.owner, rating_grade, visibility) + Rating.update_item_rating(self.item, self.owner, rating_grade, visibility) self.rating_grade = rating_grade - if share: - # this is a bit hacky but let's keep it until move to implement ActivityPub, - # by then, we'll just change this to boost - from mastodon.api import share_mark - - self.shared_link = ( - self.shelfmember.metadata.get("shared_link") - if 
self.shelfmember.metadata and not share_as_new_post - else None - ) - self.save = lambda **args: None - result, code = share_mark(self) - if not result: - if code == 401: - raise PermissionDenied() - else: - raise ValueError(code) - if self.shelfmember.metadata.get("shared_link") != self.shared_link: - self.shelfmember.metadata["shared_link"] = self.shared_link - self.shelfmember.save() - elif share_as_new_post and self.shelfmember: - self.shelfmember.metadata["shared_link"] = None - self.shelfmember.save() - - def delete(self, silence=False): + + if post_to_feed: + Takahe.post_mark(self, post_as_new) + + def delete(self): # self.logs.delete() # When deleting a mark, all logs of the mark are deleted first. - self.update(None, None, None, 0, silence=silence) + self.update(None, None, None, 0) def delete_log(self, log_id): ShelfLogEntry.objects.filter( diff --git a/journal/models/mixins.py b/journal/models/mixins.py index 69d597d2..ac3f836b 100644 --- a/journal/models/mixins.py +++ b/journal/models/mixins.py @@ -1,4 +1,6 @@ -from typing import TYPE_CHECKING, Type +from typing import TYPE_CHECKING + +from users.models import APIdentity, User if TYPE_CHECKING: from .common import Piece @@ -9,18 +11,24 @@ class UserOwnedObjectMixin: UserOwnedObjectMixin Models must add these: - owner = models.ForeignKey(User, on_delete=models.PROTECT) + owner = models.ForeignKey(APIdentity, on_delete=models.PROTECT) visibility = models.PositiveSmallIntegerField(default=0) """ - def is_visible_to(self: "Piece", viewer): # type: ignore + owner: APIdentity + visibility: int + + def is_visible_to(self: "Piece | Self", viewing_user: User) -> bool: # type: ignore owner = self.owner - if owner == viewer: - return True - if not owner.is_active: + if not owner or not owner.is_active: return False - if not viewer.is_authenticated: + if owner.user == viewing_user: + return True + if not viewing_user.is_authenticated: return self.visibility == 0 + viewer = viewing_user.identity # type: 
ignore[assignment] + if not viewer: + return False if self.visibility == 2: return False if viewer.is_blocking(owner) or owner.is_blocking(viewer): @@ -30,27 +38,9 @@ def is_visible_to(self: "Piece", viewer): # type: ignore else: return True - def is_editable_by(self: "Piece", viewer): # type: ignore - return viewer.is_authenticated and ( - viewer.is_staff or viewer.is_superuser or viewer == self.owner - ) - - @classmethod - def get_available(cls: "Type[Piece]", entity, request_user, following_only=False): # type: ignore - # e.g. SongMark.get_available(song, request.user) - query_kwargs = {entity.__class__.__name__.lower(): entity} - all_entities = cls.objects.filter(**query_kwargs).order_by( - "-created_time" - ) # get all marks for song - visible_entities = list( - filter( - lambda _entity: _entity.is_visible_to(request_user) - and ( - _entity.owner.mastodon_acct in request_user.mastodon_following - if following_only - else True - ), - all_entities, - ) + def is_editable_by(self: "Piece", viewing_user: User): # type: ignore + return viewing_user.is_authenticated and ( + viewing_user.is_staff + or viewing_user.is_superuser + or viewing_user == self.owner.user ) - return visible_entities diff --git a/journal/models/rating.py b/journal/models/rating.py index 255e049b..b9034ab2 100644 --- a/journal/models/rating.py +++ b/journal/models/rating.py @@ -1,10 +1,12 @@ +from datetime import datetime + from django.core.validators import MaxValueValidator, MinValueValidator, RegexValidator from django.db import connection, models from django.db.models import Avg, Count, Q from django.utils.translation import gettext_lazy as _ from catalog.models import Item, ItemCategory -from users.models import User +from users.models import APIdentity from .common import Content @@ -20,6 +22,51 @@ class Meta: default=0, validators=[MaxValueValidator(10), MinValueValidator(1)], null=True ) + @property + def ap_object(self): + return { + "id": self.absolute_url, + "type": "Rating", + 
"best": 10, + "worst": 1, + "value": self.grade, + "published": self.created_time.isoformat(), + "updated": self.edited_time.isoformat(), + "attributedTo": self.owner.actor_uri, + "relatedWith": self.item.absolute_url, + "url": self.absolute_url, + } + + @classmethod + def update_by_ap_object(cls, owner, item, obj, post_id, visibility): + value = obj.get("value", 0) if obj else 0 + if not value: + cls.objects.filter(owner=owner, item=item).delete() + return + best = obj.get("best", 5) + worst = obj.get("worst", 1) + if best <= worst: + return + if value < worst: + value = worst + if value > best: + value = best + if best != 10 or worst != 1: + value = round(9 * (value - worst) / (best - worst)) + 1 + else: + value = round(value) + d = { + "grade": value, + "local": False, + "remote_id": obj["id"], + "post_id": post_id, + "visibility": visibility, + "created_time": datetime.fromisoformat(obj["published"]), + "edited_time": datetime.fromisoformat(obj["updated"]), + } + p, _ = cls.objects.update_or_create(owner=owner, item=item, defaults=d) + return p + @staticmethod def get_rating_for_item(item: Item) -> float | None: stat = Rating.objects.filter(grade__isnull=False) @@ -65,19 +112,19 @@ def get_rating_distribution_for_item(item: Item): return r @staticmethod - def rate_item_by_user( - item: Item, user: User, rating_grade: int | None, visibility: int = 0 + def update_item_rating( + item: Item, owner: APIdentity, rating_grade: int | None, visibility: int = 0 ): if rating_grade and (rating_grade < 1 or rating_grade > 10): raise ValueError(f"Invalid rating grade: {rating_grade}") - rating = Rating.objects.filter(owner=user, item=item).first() + rating = Rating.objects.filter(owner=owner, item=item).first() if not rating_grade: if rating: rating.delete() rating = None elif rating is None: rating = Rating.objects.create( - owner=user, item=item, grade=rating_grade, visibility=visibility + owner=owner, item=item, grade=rating_grade, visibility=visibility ) elif 
rating.grade != rating_grade or rating.visibility != visibility: rating.visibility = visibility @@ -86,6 +133,6 @@ def rate_item_by_user( return rating @staticmethod - def get_item_rating_by_user(item: Item, user: User) -> int | None: - rating = Rating.objects.filter(owner=user, item=item).first() + def get_item_rating(item: Item, owner: APIdentity) -> int | None: + rating = Rating.objects.filter(owner=owner, item=item).first() return (rating.grade or None) if rating else None diff --git a/journal/models/renderers.py b/journal/models/renderers.py index ef6c2f5a..5e261031 100644 --- a/journal/models/renderers.py +++ b/journal/models/renderers.py @@ -19,7 +19,7 @@ _markdown = mistune.create_markdown(plugins=_mistune_plugins) -def convert_leading_space_in_md(body) -> str: +def convert_leading_space_in_md(body: str) -> str: body = re.sub(r"^\s+$", "", body, flags=re.MULTILINE) body = re.sub( r"^(\u2003*)( +)", @@ -30,11 +30,11 @@ def convert_leading_space_in_md(body) -> str: return body -def render_md(s) -> str: +def render_md(s: str) -> str: return cast(str, _markdown(s)) -def _spolier(s): +def _spolier(s: str) -> str: l = s.split(">!", 1) if len(l) == 1: return escape(s) @@ -48,5 +48,5 @@ def _spolier(s): ) -def render_text(s): +def render_text(s: str) -> str: return _spolier(s) diff --git a/journal/models/review.py b/journal/models/review.py index 31424c27..c7129f42 100644 --- a/journal/models/review.py +++ b/journal/models/review.py @@ -7,8 +7,7 @@ from markdownx.models import MarkdownxField from catalog.models import Item -from mastodon.api import share_review -from users.models import User +from users.models import APIdentity from .common import Content from .rating import Rating @@ -44,21 +43,20 @@ def mark(self): @cached_property def rating_grade(self): - return Rating.get_item_rating_by_user(self.item, self.owner) + return Rating.get_item_rating(self.item, self.owner) @classmethod - def review_item_by_user( + def update_item_review( cls, item: Item, - user: 
User, + owner: APIdentity, title: str | None, body: str | None, visibility=0, created_time=None, - share_to_mastodon=False, ): if title is None: - review = Review.objects.filter(owner=user, item=item).first() + review = Review.objects.filter(owner=owner, item=item).first() if review is not None: review.delete() return None @@ -71,9 +69,7 @@ def review_item_by_user( defaults["created_time"] = ( created_time if created_time < timezone.now() else timezone.now() ) - review, created = cls.objects.update_or_create( - item=item, owner=user, defaults=defaults + review, _ = cls.objects.update_or_create( + item=item, owner=owner, defaults=defaults ) - if share_to_mastodon and user.mastodon_username: - share_review(review) return review diff --git a/journal/models/shelf.py b/journal/models/shelf.py index 91290c0d..d7b28852 100644 --- a/journal/models/shelf.py +++ b/journal/models/shelf.py @@ -1,14 +1,17 @@ +from datetime import datetime from functools import cached_property from typing import TYPE_CHECKING from django.db import connection, models from django.utils import timezone from django.utils.translation import gettext_lazy as _ +from loguru import logger from catalog.models import Item, ItemCategory -from users.models import User +from takahe.models import Identity +from users.models import APIdentity -from .common import query_item_category +from .common import q_item_in_category from .itemlist import List, ListMember if TYPE_CHECKING: @@ -60,6 +63,43 @@ class Meta: models.Index(fields=["parent_id", "visibility", "created_time"]), ] + @property + def ap_object(self): + return { + "id": self.absolute_url, + "type": "Status", + "status": self.parent.shelf_type, + "published": self.created_time.isoformat(), + "updated": self.edited_time.isoformat(), + "attributedTo": self.owner.actor_uri, + "relatedWith": self.item.absolute_url, + "url": self.absolute_url, + } + + @classmethod + def update_by_ap_object( + cls, owner: APIdentity, item: Identity, obj: dict, post_id: int, 
visibility: int + ): + if not obj: + cls.objects.filter(owner=owner, item=item).delete() + return + shelf = owner.shelf_manager.get_shelf(obj["status"]) + if not shelf: + logger.warning(f"unable to locate shelf for {owner}, {obj}") + return + d = { + "parent": shelf, + "position": 0, + "local": False, + # "remote_id": obj["id"], + "post_id": post_id, + "visibility": visibility, + "created_time": datetime.fromisoformat(obj["published"]), + "edited_time": datetime.fromisoformat(obj["updated"]), + } + p, _ = cls.objects.update_or_create(owner=owner, item=item, defaults=d) + return p + @cached_property def mark(self) -> "Mark": from .mark import Mark @@ -108,7 +148,7 @@ def __str__(self): class ShelfLogEntry(models.Model): - owner = models.ForeignKey(User, on_delete=models.PROTECT) + owner = models.ForeignKey(APIdentity, on_delete=models.PROTECT) shelf_type = models.CharField(choices=ShelfType.choices, max_length=100, null=True) item = models.ForeignKey(Item, on_delete=models.PROTECT) timestamp = models.DateTimeField() # this may later be changed by user @@ -135,8 +175,8 @@ class ShelfManager: ShelfLogEntry can later be modified if user wish to change history """ - def __init__(self, user): - self.owner = user + def __init__(self, owner): + self.owner = owner qs = Shelf.objects.filter(owner=self.owner) self.shelf_list = {v.shelf_type: v for v in qs} if len(self.shelf_list) == 0: @@ -146,13 +186,18 @@ def initialize(self): for qt in ShelfType: self.shelf_list[qt] = Shelf.objects.create(owner=self.owner, shelf_type=qt) - def locate_item(self, item) -> ShelfMember | None: + def locate_item(self, item: Item) -> ShelfMember | None: return ShelfMember.objects.filter(item=item, owner=self.owner).first() - def move_item(self, item, shelf_type, visibility=0, metadata=None, silence=False): + def move_item( + self, + item: Item, + shelf_type: ShelfType, + visibility: int = 0, + metadata: dict | None = None, + ): # shelf_type=None means remove from current shelf # metadata=None 
means no change - # silence=False means move_item is logged. if not item: raise ValueError("empty item") new_shelfmember = None @@ -185,7 +230,7 @@ def move_item(self, item, shelf_type, visibility=0, metadata=None, silence=False elif visibility != last_visibility: # change visibility last_shelfmember.visibility = visibility last_shelfmember.save() - if changed and not silence: + if changed: if metadata is None: metadata = last_metadata or {} log_time = ( @@ -205,18 +250,20 @@ def move_item(self, item, shelf_type, visibility=0, metadata=None, silence=False def get_log(self): return ShelfLogEntry.objects.filter(owner=self.owner).order_by("timestamp") - def get_log_for_item(self, item): + def get_log_for_item(self, item: Item): return ShelfLogEntry.objects.filter(owner=self.owner, item=item).order_by( "timestamp" ) - def get_shelf(self, shelf_type): + def get_shelf(self, shelf_type: ShelfType): return self.shelf_list[shelf_type] - def get_latest_members(self, shelf_type, item_category=None): + def get_latest_members( + self, shelf_type: ShelfType, item_category: ItemCategory | None = None + ): qs = self.shelf_list[shelf_type].members.all().order_by("-created_time") if item_category: - return qs.filter(query_item_category(item_category)) + return qs.filter(q_item_in_category(item_category)) else: return qs @@ -229,14 +276,16 @@ def get_latest_members(self, shelf_type, item_category=None): # return shelf.members.all().order_by @classmethod - def get_action_label(cls, shelf_type, item_category) -> str: + def get_action_label( + cls, shelf_type: ShelfType, item_category: ItemCategory + ) -> str: sts = [ n[2] for n in ShelfTypeNames if n[0] == item_category and n[1] == shelf_type ] return sts[0] if sts else str(shelf_type) @classmethod - def get_label(cls, shelf_type, item_category): + def get_label(cls, shelf_type: ShelfType, item_category: ItemCategory): ic = ItemCategory(item_category).label st = cls.get_action_label(shelf_type, item_category) return ( @@ -246,10 
+295,10 @@ def get_label(cls, shelf_type, item_category): ) @staticmethod - def get_manager_for_user(user): - return ShelfManager(user) + def get_manager_for_user(owner: APIdentity): + return ShelfManager(owner) - def get_calendar_data(self, max_visiblity): + def get_calendar_data(self, max_visiblity: int): shelf_id = self.get_shelf(ShelfType.COMPLETE).pk timezone_offset = timezone.localtime(timezone.now()).strftime("%z") timezone_offset = timezone_offset[: len(timezone_offset) - 2] diff --git a/journal/models/tag.py b/journal/models/tag.py index 3b550bcf..28d43adc 100644 --- a/journal/models/tag.py +++ b/journal/models/tag.py @@ -8,7 +8,7 @@ from catalog.collection.models import Collection as CatalogCollection from catalog.models import Item -from users.models import User +from users.models import APIdentity from .itemlist import List, ListMember @@ -66,9 +66,9 @@ def indexable_tags_for_item(item): return tag_titles @staticmethod - def all_tags_for_user(user, public_only=False): + def all_tags_by_owner(owner, public_only=False): tags = ( - user.tag_set.all() + owner.tag_set.all() .values("title") .annotate(frequency=Count("members__id")) .order_by("-frequency") @@ -78,46 +78,44 @@ def all_tags_for_user(user, public_only=False): return list(map(lambda t: t["title"], tags)) @staticmethod - def tag_item_by_user(item, user, tag_titles, default_visibility=0): + def tag_item( + item: Item, + owner: APIdentity, + tag_titles: list[str], + default_visibility: int = 0, + ): titles = set([Tag.cleanup_title(tag_title) for tag_title in tag_titles]) current_titles = set( - [m.parent.title for m in TagMember.objects.filter(owner=user, item=item)] + [m.parent.title for m in TagMember.objects.filter(owner=owner, item=item)] ) for title in titles - current_titles: - tag = Tag.objects.filter(owner=user, title=title).first() + tag = Tag.objects.filter(owner=owner, title=title).first() if not tag: tag = Tag.objects.create( - owner=user, title=title, visibility=default_visibility + 
owner=owner, title=title, visibility=default_visibility ) tag.append_item(item, visibility=default_visibility) for title in current_titles - titles: - tag = Tag.objects.filter(owner=user, title=title).first() + tag = Tag.objects.filter(owner=owner, title=title).first() if tag: tag.remove_item(item) @staticmethod - def get_item_tags_by_user(item, user): - current_titles = [ - m.parent.title for m in TagMember.objects.filter(owner=user, item=item) - ] - return current_titles + def get_manager_for_user(owner): + return TagManager(owner) - @staticmethod - def get_manager_for_user(user): - return TagManager(user) - - def __init__(self, user): - self.owner = user + def __init__(self, owner): + self.owner = owner @property def all_tags(self): - return TagManager.all_tags_for_user(self.owner) + return TagManager.all_tags_by_owner(self.owner) @property def public_tags(self): - return TagManager.all_tags_for_user(self.owner, public_only=True) + return TagManager.all_tags_by_owner(self.owner, public_only=True) - def get_item_tags(self, item): + def get_item_tags(self, item: Item): return sorted( [ m["parent__title"] diff --git a/journal/models/utils.py b/journal/models/utils.py index eba94481..0ab8b4b0 100644 --- a/journal/models/utils.py +++ b/journal/models/utils.py @@ -2,7 +2,7 @@ from loguru import logger from catalog.models import Item -from users.models import User +from users.models import APIdentity from .collection import Collection, CollectionMember, FeaturedCollection from .comment import Comment @@ -10,27 +10,28 @@ from .itemlist import ListMember from .rating import Rating from .review import Review -from .shelf import Shelf, ShelfLogEntry, ShelfManager, ShelfMember -from .tag import Tag, TagManager, TagMember +from .shelf import ShelfLogEntry, ShelfMember +from .tag import Tag, TagMember -def reset_journal_visibility_for_user(user: User, visibility: int): - ShelfMember.objects.filter(owner=user).update(visibility=visibility) - 
Comment.objects.filter(owner=user).update(visibility=visibility) - Rating.objects.filter(owner=user).update(visibility=visibility) - Review.objects.filter(owner=user).update(visibility=visibility) +def reset_journal_visibility_for_user(owner: APIdentity, visibility: int): + ShelfMember.objects.filter(owner=owner).update(visibility=visibility) + Comment.objects.filter(owner=owner).update(visibility=visibility) + Rating.objects.filter(owner=owner).update(visibility=visibility) + Review.objects.filter(owner=owner).update(visibility=visibility) -def remove_data_by_user(user: User): - ShelfMember.objects.filter(owner=user).delete() - Comment.objects.filter(owner=user).delete() - Rating.objects.filter(owner=user).delete() - Review.objects.filter(owner=user).delete() - TagMember.objects.filter(owner=user).delete() - Tag.objects.filter(owner=user).delete() - CollectionMember.objects.filter(owner=user).delete() - Collection.objects.filter(owner=user).delete() - FeaturedCollection.objects.filter(owner=user).delete() +def remove_data_by_user(owner: APIdentity): + ShelfMember.objects.filter(owner=owner).delete() + ShelfLogEntry.objects.filter(owner=owner).delete() + Comment.objects.filter(owner=owner).delete() + Rating.objects.filter(owner=owner).delete() + Review.objects.filter(owner=owner).delete() + TagMember.objects.filter(owner=owner).delete() + Tag.objects.filter(owner=owner).delete() + CollectionMember.objects.filter(owner=owner).delete() + Collection.objects.filter(owner=owner).delete() + FeaturedCollection.objects.filter(owner=owner).delete() def update_journal_for_merged_item( diff --git a/journal/templates/_list_item.html b/journal/templates/_list_item.html index feb98f83..344b171d 100644 --- a/journal/templates/_list_item.html +++ b/journal/templates/_list_item.html @@ -55,7 +55,7 @@ + {% if mark.shared_link %} href="{{ mark.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {{ mark.created_time|date }} @@ -88,7 +88,7 @@ + {% if 
mark.review.shared_link %} href="{{ mark.review.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {{ mark.review.created_time|date }}
diff --git a/journal/templates/profile.html b/journal/templates/profile.html index 660ffca0..4f08bd79 100644 --- a/journal/templates/profile.html +++ b/journal/templates/profile.html @@ -15,14 +15,14 @@ {% else %} {{ site_name }} - {{ user.display_name }} {% endif %} - + {% if user.preference.no_anonymous_view %}{% endif %} {% include "common_libs.html" with jquery=0 v2=1 %} diff --git a/journal/templates/review.html b/journal/templates/review.html index 3c892ee4..bfdc36a9 100644 --- a/journal/templates/review.html +++ b/journal/templates/review.html @@ -41,7 +41,7 @@

+ {% if review.shared_link %} href="{{ review.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {% if request.user == review.owner %}{% endif %} diff --git a/journal/templates/user_collection_list.html b/journal/templates/user_collection_list.html index e8071350..ed8ed9e9 100644 --- a/journal/templates/user_collection_list.html +++ b/journal/templates/user_collection_list.html @@ -37,7 +37,7 @@

+ {% if collection.shared_link %} href="{{ collection.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {{ collection.created_time|date }} diff --git a/journal/templatetags/collection.py b/journal/templatetags/collection.py index b6a577b6..b8f4b0b3 100644 --- a/journal/templatetags/collection.py +++ b/journal/templatetags/collection.py @@ -1,32 +1,34 @@ from django import template from django.template.defaultfilters import stringfilter -from journal.models import Collection, Like +from journal.models import Collection +from journal.models.mixins import UserOwnedObjectMixin +from users.models.user import User register = template.Library() @register.simple_tag(takes_context=True) -def user_visibility_of(context, piece): +def user_visibility_of(context, piece: UserOwnedObjectMixin): user = context["request"].user return piece.is_visible_to(user) @register.simple_tag() -def user_progress_of(collection, user): +def user_progress_of(collection: Collection, user: User): return ( - collection.get_progress_for_user(user) if user and user.is_authenticated else 0 + collection.get_progress(user.identity) if user and user.is_authenticated else 0 ) @register.simple_tag() -def user_stats_of(collection, user): - return collection.get_stats_for_user(user) if user and user.is_authenticated else {} +def user_stats_of(collection: Collection, user: User): + return collection.get_stats(user.identity) if user and user.is_authenticated else {} @register.filter(is_safe=True) @stringfilter -def prural_items(category): +def prural_items(category: str): # TODO support i18n here # return _(f"items of {category}") if category == "book": diff --git a/journal/templatetags/user_actions.py b/journal/templatetags/user_actions.py index 6f9eecb7..d1a68b5d 100644 --- a/journal/templatetags/user_actions.py +++ b/journal/templatetags/user_actions.py @@ -2,6 +2,7 @@ from django.urls import reverse from journal.models import Collection, Like +from takahe.utils import Takahe 
register = template.Library() @@ -22,10 +23,9 @@ def wish_item_action(context, item): def like_piece_action(context, piece): user = context["request"].user action = {} - if user and user.is_authenticated: + if user and user.is_authenticated and piece and piece.post_id: action = { - "taken": piece.owner == user - or Like.objects.filter(target=piece, owner=user).first() is not None, + "taken": Takahe.post_liked_by(piece.post_id, user), "url": reverse("journal:like", args=[piece.uuid]), } return action @@ -34,4 +34,9 @@ def like_piece_action(context, piece): @register.simple_tag(takes_context=True) def liked_piece(context, piece): user = context["request"].user - return user and user.is_authenticated and Like.user_liked_piece(user, piece) + return ( + user + and user.is_authenticated + and piece.post_id + and Takahe.get_user_interaction(piece.post_id, user, "like") + ) diff --git a/journal/tests.py b/journal/tests.py index d422bfbb..0e87ae1e 100644 --- a/journal/tests.py +++ b/journal/tests.py @@ -9,15 +9,16 @@ class CollectionTest(TestCase): + databases = "__all__" + def setUp(self): self.book1 = Edition.objects.create(title="Hyperion") self.book2 = Edition.objects.create(title="Andymion") - self.user = User.register(email="a@b.com") - pass + self.user = User.register(email="a@b.com", username="user") def test_collection(self): - collection = Collection.objects.create(title="test", owner=self.user) - collection = Collection.objects.filter(title="test", owner=self.user).first() + Collection.objects.create(title="test", owner=self.user.identity) + collection = Collection.objects.get(title="test", owner=self.user.identity) self.assertEqual(collection.catalog_item.title, "test") member1 = collection.append_item(self.book1) member1.note = "my notes" @@ -38,13 +39,15 @@ def test_collection(self): class ShelfTest(TestCase): + databases = "__all__" + def setUp(self): pass def test_shelf(self): - user = User.register(mastodon_site="site", mastodon_username="name") - 
shelf_manager = ShelfManager(user=user) - self.assertEqual(user.shelf_set.all().count(), 3) + user = User.register(email="a@b.com", username="user") + shelf_manager = user.identity.shelf_manager + self.assertEqual(len(shelf_manager.shelf_list.items()), 3) book1 = Edition.objects.create(title="Hyperion") book2 = Edition.objects.create(title="Andymion") q1 = shelf_manager.get_shelf(ShelfType.WISHLIST) @@ -64,90 +67,86 @@ def test_shelf(self): self.assertEqual(q2.members.all().count(), 1) log = shelf_manager.get_log_for_item(book1) self.assertEqual(log.count(), 2) - self.assertEqual(log.last().metadata, {}) + last_log = log.last() + self.assertEqual(last_log.metadata if last_log else 42, {}) shelf_manager.move_item(book1, ShelfType.PROGRESS, metadata={"progress": 1}) time.sleep(0.001) self.assertEqual(q1.members.all().count(), 1) self.assertEqual(q2.members.all().count(), 1) log = shelf_manager.get_log_for_item(book1) self.assertEqual(log.count(), 3) - self.assertEqual(log.last().metadata, {"progress": 1}) + last_log = log.last() + self.assertEqual(last_log.metadata if last_log else 42, {"progress": 1}) shelf_manager.move_item(book1, ShelfType.PROGRESS, metadata={"progress": 1}) time.sleep(0.001) log = shelf_manager.get_log_for_item(book1) self.assertEqual(log.count(), 3) - self.assertEqual(log.last().metadata, {"progress": 1}) + last_log = log.last() + self.assertEqual(last_log.metadata if last_log else 42, {"progress": 1}) shelf_manager.move_item(book1, ShelfType.PROGRESS, metadata={"progress": 10}) time.sleep(0.001) log = shelf_manager.get_log_for_item(book1) self.assertEqual(log.count(), 4) - self.assertEqual(log.last().metadata, {"progress": 10}) + + last_log = log.last() + self.assertEqual(last_log.metadata if last_log else 42, {"progress": 10}) shelf_manager.move_item(book1, ShelfType.PROGRESS) time.sleep(0.001) log = shelf_manager.get_log_for_item(book1) self.assertEqual(log.count(), 4) - self.assertEqual(log.last().metadata, {"progress": 10}) + last_log = 
log.last() + self.assertEqual(last_log.metadata if last_log else 42, {"progress": 10}) shelf_manager.move_item(book1, ShelfType.PROGRESS, metadata={"progress": 90}) time.sleep(0.001) log = shelf_manager.get_log_for_item(book1) self.assertEqual(log.count(), 5) - self.assertEqual(Mark(user, book1).visibility, 0) + self.assertEqual(Mark(user.identity, book1).visibility, 0) shelf_manager.move_item( book1, ShelfType.PROGRESS, metadata={"progress": 90}, visibility=1 ) time.sleep(0.001) - self.assertEqual(Mark(user, book1).visibility, 1) + self.assertEqual(Mark(user.identity, book1).visibility, 1) self.assertEqual(shelf_manager.get_log_for_item(book1).count(), 5) - # test silence mark mode -> no log - shelf_manager.move_item(book1, ShelfType.WISHLIST, silence=True) - self.assertEqual(log.count(), 5) - shelf_manager.move_item(book1, ShelfType.PROGRESS, silence=True) - self.assertEqual(log.count(), 5) - # test delete one log - first_log = log.first() - Mark(user, book1).delete_log(first_log.id) - self.assertEqual(log.count(), 4) - # # test delete mark -> leave one log: 移除标记 - # Mark(user, book1).delete() - # self.assertEqual(log.count(), 1) - # # test delete all logs - # shelf_manager.move_item(book1, ShelfType.PROGRESS) - # self.assertEqual(log.count(), 2) - # Mark(user, book1).delete(silence=True) - # self.assertEqual(log.count(), 0) + # test delete mark -> one more log + Mark(user.identity, book1).delete() + self.assertEqual(log.count(), 6) class TagTest(TestCase): + databases = "__all__" + def setUp(self): self.book1 = Edition.objects.create(title="Hyperion") self.book2 = Edition.objects.create(title="Andymion") - self.movie1 = Edition.objects.create(title="Hyperion, The Movie") - self.user1 = User.register(mastodon_site="site", mastodon_username="name") - self.user2 = User.register(mastodon_site="site2", mastodon_username="name2") - self.user3 = User.register(mastodon_site="site2", mastodon_username="name3") + self.movie1 = Edition.objects.create(title="Fight Club") + 
self.user1 = User.register(email="a@b.com", username="user") + self.user2 = User.register(email="x@b.com", username="user2") + self.user3 = User.register(email="y@b.com", username="user3") pass def test_user_tag(self): t1 = "tag 1" t2 = "tag 2" t3 = "tag 3" - TagManager.tag_item_by_user(self.book1, self.user2, [t1, t3]) + TagManager.tag_item(self.book1, self.user2.identity, [t1, t3]) self.assertEqual(self.book1.tags, [t1, t3]) - TagManager.tag_item_by_user(self.book1, self.user2, [t2, t3]) + TagManager.tag_item(self.book1, self.user2.identity, [t2, t3]) self.assertEqual(self.book1.tags, [t2, t3]) class MarkTest(TestCase): + databases = "__all__" + def setUp(self): self.book1 = Edition.objects.create(title="Hyperion") - self.user1 = User.register(mastodon_site="site", mastodon_username="name") + self.user1 = User.register(email="a@b.com", username="user") pref = self.user1.preference pref.default_visibility = 2 pref.save() def test_mark(self): - mark = Mark(self.user1, self.book1) + mark = Mark(self.user1.identity, self.book1) self.assertEqual(mark.shelf_type, None) self.assertEqual(mark.shelf_label, None) self.assertEqual(mark.comment_text, None) @@ -157,7 +156,7 @@ def test_mark(self): self.assertEqual(mark.tags, []) mark.update(ShelfType.WISHLIST, "a gentle comment", 9, 1) - mark = Mark(self.user1, self.book1) + mark = Mark(self.user1.identity, self.book1) self.assertEqual(mark.shelf_type, ShelfType.WISHLIST) self.assertEqual(mark.shelf_label, "想读的书") self.assertEqual(mark.comment_text, "a gentle comment") @@ -166,10 +165,17 @@ def test_mark(self): self.assertEqual(mark.review, None) self.assertEqual(mark.tags, []) - review = Review.review_item_by_user(self.book1, self.user1, "Critic", "Review") - mark = Mark(self.user1, self.book1) + def test_review(self): + review = Review.update_item_review( + self.book1, self.user1.identity, "Critic", "Review" + ) + mark = Mark(self.user1.identity, self.book1) self.assertEqual(mark.review, review) + 
Review.update_item_review(self.book1, self.user1.identity, None, None) + mark = Mark(self.user1.identity, self.book1) + self.assertIsNone(mark.review) - TagManager.tag_item_by_user(self.book1, self.user1, [" Sci-Fi ", " fic "]) - mark = Mark(self.user1, self.book1) + def test_tag(self): + TagManager.tag_item(self.book1, self.user1.identity, [" Sci-Fi ", " fic "]) + mark = Mark(self.user1.identity, self.book1) self.assertEqual(mark.tags, ["Sci-Fi", "fic"]) diff --git a/journal/views/collection.py b/journal/views/collection.py index 6519498e..b89d834f 100644 --- a/journal/views/collection.py +++ b/journal/views/collection.py @@ -1,28 +1,28 @@ from django.contrib.auth.decorators import login_required from django.core.exceptions import BadRequest, ObjectDoesNotExist, PermissionDenied -from django.http import Http404, HttpResponse, HttpResponseRedirect +from django.http import Http404, HttpRequest, HttpResponse, HttpResponseRedirect from django.shortcuts import get_object_or_404, redirect, render from django.urls import reverse from django.utils import timezone from django.utils.translation import gettext_lazy as _ -from catalog.models import * -from common.utils import PageLinksGenerator, get_uuid_or_404 -from journal.models.renderers import convert_leading_space_in_md +from catalog.models import Item +from common.utils import AuthedHttpRequest, get_uuid_or_404 from mastodon.api import share_collection from users.models import User +from users.models.apidentity import APIdentity from users.views import render_user_blocked, render_user_not_found from ..forms import * from ..models import * -from .common import render_relogin +from .common import render_relogin, target_identity_required @login_required -def add_to_collection(request, item_uuid): +def add_to_collection(request: AuthedHttpRequest, item_uuid): item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid)) if request.method == "GET": - collections = Collection.objects.filter(owner=request.user) + collections 
= Collection.objects.filter(owner=request.user.identity) return render( request, "add_to_collection.html", @@ -35,14 +35,14 @@ def add_to_collection(request, item_uuid): cid = int(request.POST.get("collection_id", default=0)) if not cid: cid = Collection.objects.create( - owner=request.user, title=f"{request.user.display_name}的收藏单" + owner=request.user.identity, title=f"{request.user.display_name}的收藏单" ).id - collection = Collection.objects.get(owner=request.user, id=cid) + collection = Collection.objects.get(owner=request.user.identity, id=cid) collection.append_item(item, note=request.POST.get("note")) - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) -def collection_retrieve(request, collection_uuid): +def collection_retrieve(request: AuthedHttpRequest, collection_uuid): collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) if not collection.is_visible_to(request.user): raise PermissionDenied() @@ -53,19 +53,19 @@ def collection_retrieve(request, collection_uuid): else False ) featured_since = ( - collection.featured_by_user_since(request.user) + collection.featured_since(request.user.identity) if request.user.is_authenticated else None ) available_as_featured = ( request.user.is_authenticated - and (following or request.user == collection.owner) + and (following or request.user.identity == collection.owner) and not featured_since and collection.members.all().exists() ) stats = {} if featured_since: - stats = collection.get_stats_for_user(request.user) + stats = collection.get_stats(request.user.identity) stats["wishlist_deg"] = ( round(stats["wishlist"] / stats["total"] * 360) if stats["total"] else 0 ) @@ -90,33 +90,35 @@ def collection_retrieve(request, collection_uuid): @login_required -def collection_add_featured(request, collection_uuid): +def collection_add_featured(request: AuthedHttpRequest, collection_uuid): if request.method != "POST": raise 
BadRequest() collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) if not collection.is_visible_to(request.user): raise PermissionDenied() - FeaturedCollection.objects.update_or_create(owner=request.user, target=collection) - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + FeaturedCollection.objects.update_or_create( + owner=request.user.identity, target=collection + ) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) @login_required -def collection_remove_featured(request, collection_uuid): +def collection_remove_featured(request: AuthedHttpRequest, collection_uuid): if request.method != "POST": raise BadRequest() collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) if not collection.is_visible_to(request.user): raise PermissionDenied() fc = FeaturedCollection.objects.filter( - owner=request.user, target=collection + owner=request.user.identity, target=collection ).first() if fc: fc.delete() - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) @login_required -def collection_share(request, collection_uuid): +def collection_share(request: AuthedHttpRequest, collection_uuid): collection = ( get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) if collection_uuid @@ -130,14 +132,16 @@ def collection_share(request, collection_uuid): visibility = int(request.POST.get("visibility", default=0)) comment = request.POST.get("comment") if share_collection(collection, comment, request.user, visibility): - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) else: return render_relogin(request) else: raise BadRequest() -def collection_retrieve_items(request, collection_uuid, edit=False, msg=None): +def collection_retrieve_items( + request: AuthedHttpRequest, collection_uuid, edit=False, msg=None +): collection = 
get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) if not collection.is_visible_to(request.user): raise PermissionDenied() @@ -155,7 +159,7 @@ def collection_retrieve_items(request, collection_uuid, edit=False, msg=None): @login_required -def collection_append_item(request, collection_uuid): +def collection_append_item(request: AuthedHttpRequest, collection_uuid): if request.method != "POST": raise BadRequest() collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) @@ -175,7 +179,7 @@ def collection_append_item(request, collection_uuid): @login_required -def collection_remove_item(request, collection_uuid, item_uuid): +def collection_remove_item(request: AuthedHttpRequest, collection_uuid, item_uuid): if request.method != "POST": raise BadRequest() collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) @@ -187,7 +191,9 @@ def collection_remove_item(request, collection_uuid, item_uuid): @login_required -def collection_move_item(request, direction, collection_uuid, item_uuid): +def collection_move_item( + request: AuthedHttpRequest, direction, collection_uuid, item_uuid +): if request.method != "POST": raise BadRequest() collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) @@ -202,7 +208,7 @@ def collection_move_item(request, direction, collection_uuid, item_uuid): @login_required -def collection_update_member_order(request, collection_uuid): +def collection_update_member_order(request: AuthedHttpRequest, collection_uuid): if request.method != "POST": raise BadRequest() collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) @@ -217,7 +223,7 @@ def collection_update_member_order(request, collection_uuid): @login_required -def collection_update_item_note(request, collection_uuid, item_uuid): +def collection_update_item_note(request: AuthedHttpRequest, collection_uuid, item_uuid): collection = get_object_or_404(Collection, 
uid=get_uuid_or_404(collection_uuid)) if not collection.is_editable_by(request.user): raise PermissionDenied() @@ -241,7 +247,7 @@ def collection_update_item_note(request, collection_uuid, item_uuid): @login_required -def collection_edit(request, collection_uuid=None): +def collection_edit(request: AuthedHttpRequest, collection_uuid=None): collection = ( get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) if collection_uuid @@ -259,7 +265,7 @@ def collection_edit(request, collection_uuid=None): { "form": form, "collection": collection, - "user": collection.owner if collection else request.user, + "user": collection.owner.user if collection else request.user, }, ) elif request.method == "POST": @@ -270,7 +276,7 @@ def collection_edit(request, collection_uuid=None): ) if form.is_valid(): if not collection: - form.instance.owner = request.user + form.instance.owner = request.user.identity form.instance.edited_time = timezone.now() form.save() return redirect( @@ -283,47 +289,34 @@ def collection_edit(request, collection_uuid=None): @login_required -def user_collection_list(request, user_name): - user = User.get(user_name) - if user is None: - return render_user_not_found(request) - if user != request.user and ( - request.user.is_blocked_by(user) or request.user.is_blocking(user) - ): - return render_user_blocked(request) - collections = Collection.objects.filter(owner=user) - if user != request.user: - if request.user.is_following(user): - collections = collections.filter(visibility__in=[0, 1]) - else: - collections = collections.filter(visibility=0) +@target_identity_required +def user_collection_list(request: AuthedHttpRequest, user_name): + target = request.target_identity + collections = Collection.objects.filter(owner=target).filter( + q_owned_piece_visible_to_user(request.user, target) + ) return render( request, "user_collection_list.html", { - "user": user, + "user": target.user, "collections": collections, }, ) @login_required -def 
user_liked_collection_list(request, user_name): - user = User.get(user_name) - if user is None: - return render_user_not_found(request) - if user != request.user and ( - request.user.is_blocked_by(user) or request.user.is_blocking(user) - ): - return render_user_blocked(request) - collections = Collection.objects.filter(likes__owner=user) - if user != request.user: - collections = collections.filter(query_visible(request.user)) +@target_identity_required +def user_liked_collection_list(request: AuthedHttpRequest, user_name): + target = request.target_identity + collections = Collection.objects.filter(likes__owner=target) + if target.user != request.user: + collections = collections.filter(q_piece_visible_to_user(request.user)) return render( request, "user_collection_list.html", { - "user": user, + "user": target.user, "collections": collections, "liked": True, }, diff --git a/journal/views/common.py b/journal/views/common.py index cb36aa36..38e4178d 100644 --- a/journal/views/common.py +++ b/journal/views/common.py @@ -1,3 +1,5 @@ +import functools + from django.contrib.auth.decorators import login_required from django.core.exceptions import BadRequest, ObjectDoesNotExist, PermissionDenied from django.core.paginator import Paginator @@ -6,8 +8,8 @@ from django.utils.translation import gettext_lazy as _ from catalog.models import * -from common.utils import PageLinksGenerator, get_uuid_or_404 -from users.models import User +from common.utils import AuthedHttpRequest, PageLinksGenerator, get_uuid_or_404 +from users.models import APIdentity from users.views import render_user_blocked, render_user_not_found from ..forms import * @@ -16,6 +18,25 @@ PAGE_SIZE = 10 +def target_identity_required(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + request = kwargs["request"] + handler = kwargs["user_name"] + try: + target = APIdentity.get_by_handler(handler) + except: + return render_user_not_found(request) + if not target.is_visible_to_user(request.user): + 
return render_user_blocked(request) + request.target_identity = target + # request.identity = ( + # request.user.identity if request.user.is_authenticated else None + # ) + + return wrapper + + def render_relogin(request): return render( request, @@ -41,42 +62,45 @@ def render_list_not_found(request): ) +@login_required +@target_identity_required def render_list( - request, user_name, type, shelf_type=None, item_category=None, tag_title=None + request: AuthedHttpRequest, + user_name, + type, + shelf_type=None, + item_category=None, + tag_title=None, ): - user = User.get(user_name) - if user is None: - return render_user_not_found(request) - if user != request.user and ( - request.user.is_blocked_by(user) or request.user.is_blocking(user) - ): - return render_user_blocked(request) + target = request.target_identity + viewer = request.user.identity tag = None if type == "mark": - queryset = user.shelf_manager.get_latest_members(shelf_type, item_category) + queryset = target.user.shelf_manager.get_latest_members( + shelf_type, item_category + ) elif type == "tagmember": - tag = Tag.objects.filter(owner=user, title=tag_title).first() + tag = Tag.objects.filter(owner=target, title=tag_title).first() if not tag: return render_list_not_found(request) - if tag.visibility != 0 and user != request.user: + if tag.visibility != 0 and target != viewer: return render_list_not_found(request) queryset = TagMember.objects.filter(parent=tag) - elif type == "review": - queryset = Review.objects.filter(owner=user) - queryset = queryset.filter(query_item_category(item_category)) + elif type == "review" and item_category: + queryset = Review.objects.filter(q_item_in_category(item_category)) else: raise BadRequest() - queryset = queryset.filter(q_visible_to(request.user, user)).order_by( - "-created_time" - ) + queryset = queryset.filter( + q_owned_piece_visible_to_user(request.user, target) + ).order_by("-created_time") paginator = Paginator(queryset, PAGE_SIZE) - page_number = 
request.GET.get("page", default=1) + page_number = int(request.GET.get("page", default=1)) members = paginator.get_page(page_number) pagination = PageLinksGenerator(PAGE_SIZE, page_number, paginator.num_pages) return render( request, f"user_{type}_list.html", - {"user": user, "members": members, "tag": tag, "pagination": pagination}, + {"user": target.user, "members": members, "tag": tag, "pagination": pagination}, ) diff --git a/journal/views/mark.py b/journal/views/mark.py index b121e89d..1e24269c 100644 --- a/journal/views/mark.py +++ b/journal/views/mark.py @@ -12,17 +12,18 @@ from django.utils.translation import gettext_lazy as _ from catalog.models import * -from common.utils import PageLinksGenerator, get_uuid_or_404 +from common.utils import AuthedHttpRequest, PageLinksGenerator, get_uuid_or_404 from mastodon.api import ( get_spoiler_text, get_status_id_by_url, get_visibility, post_toot, ) +from takahe.utils import Takahe from ..forms import * from ..models import * -from .common import render_list, render_relogin +from .common import render_list, render_relogin, target_identity_required _logger = logging.getLogger(__name__) PAGE_SIZE = 10 @@ -31,28 +32,29 @@ @login_required -def wish(request, item_uuid): +def wish(request: AuthedHttpRequest, item_uuid): if request.method != "POST": raise BadRequest() item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid)) if not item: raise Http404() - request.user.shelf_manager.move_item(item, ShelfType.WISHLIST) + request.user.identity.shelf_manager.move_item(item, ShelfType.WISHLIST) if request.GET.get("back"): - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) return HttpResponse(_checkmark) @login_required -def like(request, piece_uuid): +def like(request: AuthedHttpRequest, piece_uuid): if request.method != "POST": raise BadRequest() piece = get_object_or_404(Piece, uid=get_uuid_or_404(piece_uuid)) if not piece: raise Http404() - 
Like.user_like_piece(request.user, piece) + if piece.post_id: + Takahe.like_post(piece.post_id, request.user.identity.pk) if request.GET.get("back"): - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) elif request.GET.get("stats"): return render( request, @@ -68,15 +70,16 @@ def like(request, piece_uuid): @login_required -def unlike(request, piece_uuid): +def unlike(request: AuthedHttpRequest, piece_uuid): if request.method != "POST": raise BadRequest() piece = get_object_or_404(Piece, uid=get_uuid_or_404(piece_uuid)) if not piece: raise Http404() - Like.user_unlike_piece(request.user, piece) + if piece.post_id: + Takahe.unlike_post(piece.post_id, request.user.identity.pk) if request.GET.get("back"): - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) elif request.GET.get("stats"): return render( request, @@ -92,11 +95,11 @@ def unlike(request, piece_uuid): @login_required -def mark(request, item_uuid): +def mark(request: AuthedHttpRequest, item_uuid): item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid)) - mark = Mark(request.user, item) + mark = Mark(request.user.identity, item) if request.method == "GET": - tags = TagManager.get_item_tags_by_user(item, request.user) + tags = request.user.identity.tag_manager.get_item_tags(item) shelf_types = [ (n[1], n[2]) for n in iter(ShelfTypeNames) if n[0] == item.category ] @@ -115,15 +118,8 @@ def mark(request, item_uuid): ) elif request.method == "POST": if request.POST.get("delete", default=False): - silence = request.POST.get("silence", False) - mark.delete(silence=silence) - if ( - silence - ): # this means the mark is deleted from mark_history, thus redirect to item page - return redirect( - reverse("catalog:retrieve", args=[item.url_path, item.uuid]) - ) - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + mark.delete() + return 
HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) else: visibility = int(request.POST.get("visibility", default=0)) rating_grade = request.POST.get("rating_grade", default=0) @@ -143,7 +139,7 @@ def mark(request, item_uuid): ) if mark_date and mark_date >= timezone.now(): mark_date = None - TagManager.tag_item_by_user(item, request.user, tags, visibility) + TagManager.tag_item(item, request.user.identity, tags, visibility) try: mark.update( status, @@ -167,7 +163,7 @@ def mark(request, item_uuid): "secondary_msg": err, }, ) - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) raise BadRequest() @@ -202,12 +198,12 @@ def share_comment(user, item, text, visibility, shared_link=None, position=None) @login_required -def mark_log(request, item_uuid, log_id): +def mark_log(request: AuthedHttpRequest, item_uuid, log_id): """ Delete log of one item by log id. """ item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid)) - mark = Mark(request.user, item) + mark = Mark(request.user.identity, item) if request.method == "POST": if request.GET.get("delete", default=False): if log_id: @@ -219,7 +215,7 @@ def mark_log(request, item_uuid, log_id): @login_required -def comment(request, item_uuid): +def comment(request: AuthedHttpRequest, item_uuid): item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid)) if not item.class_name in ["podcastepisode", "tvepisode"]: raise BadRequest("不支持评论此类型的条目") @@ -246,7 +242,7 @@ def comment(request, item_uuid): if not comment: raise Http404() comment.delete() - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) visibility = int(request.POST.get("visibility", default=0)) text = request.POST.get("text") position = None @@ -302,12 +298,11 @@ def comment(request, item_uuid): # ) if post_error: return render_relogin(request) - return 
HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) raise BadRequest() -@login_required -def user_mark_list(request, user_name, shelf_type, item_category): +def user_mark_list(request: AuthedHttpRequest, user_name, shelf_type, item_category): return render_list( request, user_name, "mark", shelf_type=shelf_type, item_category=item_category ) diff --git a/journal/views/profile.py b/journal/views/profile.py index 04876050..7f6f6dfc 100644 --- a/journal/views/profile.py +++ b/journal/views/profile.py @@ -6,30 +6,32 @@ from user_messages import api as msg from catalog.models import * -from users.models import User +from common.utils import AuthedHttpRequest +from users.models import APIdentity, User from users.views import render_user_blocked, render_user_not_found from ..forms import * from ..models import * -from .common import render_list +from .common import render_list, target_identity_required -def profile(request, user_name): +@target_identity_required +def profile(request: AuthedHttpRequest, user_name): if request.method != "GET": raise BadRequest() - user = User.get(user_name, case_sensitive=True) - if user is None or not user.is_active: - return render_user_not_found(request) - if user.mastodon_acct != user_name and user.username != user_name: - return redirect(user.url) - if not request.user.is_authenticated and user.preference.no_anonymous_view: - return render(request, "users/home_anonymous.html", {"user": user}) - if user != request.user and ( - user.is_blocked_by(request.user) or user.is_blocking(request.user) + target = request.target_identity + # if user.mastodon_acct != user_name and user.username != user_name: + # return redirect(user.url) + if not request.user.is_authenticated and target.preference.no_anonymous_view: + return render(request, "users/home_anonymous.html", {"user": target.user}) + me = target.user == request.user + if not me and ( + 
target.is_blocked_by(request.user.identity) + or target.is_blocking(request.user.identity) ): return render_user_blocked(request) - qv = q_visible_to(request.user, user) + qv = q_owned_piece_visible_to_user(request.user, target) shelf_list = {} visbile_categories = [ ItemCategory.Book, @@ -43,9 +45,9 @@ def profile(request, user_name): for category in visbile_categories: shelf_list[category] = {} for shelf_type in ShelfType: - label = user.shelf_manager.get_label(shelf_type, category) + label = target.shelf_manager.get_label(shelf_type, category) if label is not None: - members = user.shelf_manager.get_latest_members( + members = target.shelf_manager.get_latest_members( shelf_type, category ).filter(qv) shelf_list[category][shelf_type] = { @@ -53,35 +55,32 @@ def profile(request, user_name): "count": members.count(), "members": members[:10].prefetch_related("item"), } - reviews = ( - Review.objects.filter(owner=user) - .filter(qv) - .filter(query_item_category(category)) - .order_by("-created_time") + reviews = Review.objects.filter(q_item_in_category(category)).order_by( + "-created_time" ) shelf_list[category]["reviewed"] = { "title": "评论过的" + category.label, "count": reviews.count(), "members": reviews[:10].prefetch_related("item"), } - collections = ( - Collection.objects.filter(owner=user).filter(qv).order_by("-created_time") - ) + collections = Collection.objects.filter(qv).order_by("-created_time") liked_collections = ( - Like.user_likes_by_class(user, Collection) + Like.user_likes_by_class(target, Collection) .order_by("-edited_time") .values_list("target_id", flat=True) ) - if user != request.user: - liked_collections = liked_collections.filter(query_visible(request.user)) - top_tags = user.tag_manager.public_tags[:10] + if not me: + liked_collections = liked_collections.filter( + q_piece_visible_to_user(request.user) + ) + top_tags = target.tag_manager.public_tags[:10] else: - top_tags = user.tag_manager.all_tags[:10] + top_tags = 
target.tag_manager.all_tags[:10] return render( request, "profile.html", { - "user": user, + "user": target.user, "top_tags": top_tags, "shelf_list": shelf_list, "collections": collections[:10], @@ -91,7 +90,7 @@ def profile(request, user_name): for i in liked_collections.order_by("-edited_time")[:10] ], "liked_collections_count": liked_collections.count(), - "layout": user.preference.profile_layout, + "layout": target.preference.profile_layout, }, ) @@ -102,7 +101,7 @@ def user_calendar_data(request, user_name): user = User.get(user_name) if user is None or not request.user.is_authenticated: return HttpResponse("") - max_visiblity = max_visiblity_to(request.user, user) + max_visiblity = max_visiblity_to_user(request.user, user.identity) calendar_data = user.shelf_manager.get_calendar_data(max_visiblity) return render( request, diff --git a/journal/views/review.py b/journal/views/review.py index 52904779..adce1029 100644 --- a/journal/views/review.py +++ b/journal/views/review.py @@ -12,9 +12,11 @@ from django.utils.translation import gettext_lazy as _ from catalog.models import * -from common.utils import PageLinksGenerator, get_uuid_or_404 +from common.utils import AuthedHttpRequest, PageLinksGenerator, get_uuid_or_404 from journal.models.renderers import convert_leading_space_in_md, render_md +from mastodon.api import share_review from users.models import User +from users.models.apidentity import APIdentity from ..forms import * from ..models import * @@ -32,7 +34,7 @@ def review_retrieve(request, review_uuid): @login_required -def review_edit(request, item_uuid, review_uuid=None): +def review_edit(request: AuthedHttpRequest, item_uuid, review_uuid=None): item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid)) review = ( get_object_or_404(Review, uid=get_uuid_or_404(review_uuid)) @@ -65,24 +67,28 @@ def review_edit(request, item_uuid, review_uuid=None): if form.is_valid(): mark_date = None if request.POST.get("mark_anotherday"): - dt = 
parse_datetime(request.POST.get("mark_date") + " 20:00:00") + dt = parse_datetime(request.POST.get("mark_date", "") + " 20:00:00") mark_date = ( dt.replace(tzinfo=timezone.get_current_timezone()) if dt else None ) body = form.instance.body if request.POST.get("leading_space"): body = convert_leading_space_in_md(body) - review = Review.review_item_by_user( + review = Review.update_item_review( item, - request.user, + request.user.identity, form.cleaned_data["title"], body, form.cleaned_data["visibility"], mark_date, - form.cleaned_data["share_to_mastodon"], ) if not review: raise BadRequest() + if ( + form.cleaned_data["share_to_mastodon"] + and request.user.mastodon_username + ): + share_review(review) return redirect(reverse("journal:review_retrieve", args=[review.uuid])) else: raise BadRequest() @@ -90,7 +96,6 @@ def review_edit(request, item_uuid, review_uuid=None): raise BadRequest() -@login_required def user_review_list(request, user_name, item_category): return render_list(request, user_name, "review", item_category=item_category) @@ -100,16 +105,16 @@ def user_review_list(request, user_name, item_category): class ReviewFeed(Feed): def get_object(self, request, id): - return User.get(id) + return APIdentity.get_by_handler(id) - def title(self, user): - return "%s的评论" % user.display_name if user else "无效链接" + def title(self, owner): + return "%s的评论" % owner.display_name if owner else "无效链接" - def link(self, user): - return user.url if user else settings.SITE_INFO["site_url"] + def link(self, owner): + return owner.url if owner else settings.SITE_INFO["site_url"] - def description(self, user): - return "%s的评论合集 - NeoDB" % user.display_name if user else "无效链接" + def description(self, owner): + return "%s的评论合集 - NeoDB" % owner.display_name if owner else "无效链接" def items(self, user): if user is None or user.preference.no_anonymous_view: diff --git a/journal/views/tag.py b/journal/views/tag.py index b2847349..c9f1239f 100644 --- a/journal/views/tag.py +++ 
b/journal/views/tag.py @@ -13,29 +13,24 @@ from ..forms import * from ..models import * -from .common import render_list +from .common import render_list, target_identity_required PAGE_SIZE = 10 @login_required +@target_identity_required def user_tag_list(request, user_name): - user = User.get(user_name) - if user is None: - return render_user_not_found(request) - if user != request.user and ( - request.user.is_blocked_by(user) or request.user.is_blocking(user) - ): - return render_user_blocked(request) - tags = Tag.objects.filter(owner=user) - if user != request.user: + target = request.target + tags = Tag.objects.filter(owner=target) + if target.user != request.user: tags = tags.filter(visibility=0) tags = tags.values("title").annotate(total=Count("members")).order_by("-total") return render( request, "user_tag_list.html", { - "user": user, + "user": target.user, "tags": tags, }, ) @@ -47,7 +42,7 @@ def user_tag_edit(request): tag_title = Tag.cleanup_title(request.GET.get("tag", ""), replace=False) if not tag_title: raise Http404() - tag = Tag.objects.filter(owner=request.user, title=tag_title).first() + tag = Tag.objects.filter(owner=request.user.identity, title=tag_title).first() if not tag: raise Http404() return render(request, "tag_edit.html", {"tag": tag}) @@ -55,7 +50,7 @@ def user_tag_edit(request): tag_title = Tag.cleanup_title(request.POST.get("title", ""), replace=False) tag_id = request.POST.get("id") tag = ( - Tag.objects.filter(owner=request.user, id=tag_id).first() + Tag.objects.filter(owner=request.user.identity, id=tag_id).first() if tag_id else None ) @@ -70,7 +65,9 @@ def user_tag_edit(request): ) elif ( tag_title != tag.title - and Tag.objects.filter(owner=request.user, title=tag_title).exists() + and Tag.objects.filter( + owner=request.user.identity, title=tag_title + ).exists() ): msg.error(request.user, _("标签已存在")) return HttpResponseRedirect(request.META.get("HTTP_REFERER")) @@ -88,6 +85,5 @@ def user_tag_edit(request): raise BadRequest() 
-@login_required def user_tag_member_list(request, user_name, tag_title): return render_list(request, user_name, "tagmember", tag_title=tag_title) diff --git a/mastodon/api.py b/mastodon/api.py index 0a43e048..0f5f1d69 100644 --- a/mastodon/api.py +++ b/mastodon/api.py @@ -1,5 +1,5 @@ import functools -import logging +import html import random import re import string @@ -193,7 +193,7 @@ def detect_server_info(login_domain): try: response = get(url, headers={"User-Agent": USER_AGENT}) except Exception as e: - logger.error(f"Error connecting {login_domain} {e}") + logger.error(f"Error connecting {login_domain}: {e}") raise Exception(f"无法连接 {login_domain}") if response.status_code != 200: logger.error(f"Error connecting {login_domain}: {response.status_code}") @@ -363,7 +363,7 @@ def get_visibility(visibility, user): def share_mark(mark): from catalog.common import ItemCategory - user = mark.owner + user = mark.owner.user if mark.visibility == 2: visibility = TootVisibilityEnum.DIRECT elif mark.visibility == 1: @@ -466,10 +466,10 @@ def share_collection(collection, comment, user, visibility_no): ) user_str = ( "我" - if user == collection.owner + if user == collection.owner.user else ( - " @" + collection.owner.mastodon_acct + " " - if collection.owner.mastodon_acct + " @" + collection.owner.user.mastodon_acct + " " + if collection.owner.user.mastodon_acct else " " + collection.owner.username + " " ) ) diff --git a/pyproject.toml b/pyproject.toml index b242d65f..e48493aa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [tool.pyright] -exclude = [ "media", ".venv", ".git", "playground", "**/tests.py", "neodb", "**/migrations", "**/commands", "**/sites/douban_*" ] +exclude = [ "media", ".venv", ".git", "playground", "catalog/*/tests.py", "neodb", "**/migrations", "**/sites/douban_*" ] [tool.djlint] ignore="T002,T003,H006,H019,H020,H021,H023,H030,H031" diff --git a/requirements-dev.txt b/requirements-dev.txt index b146c739..513e98fe 100644 --- 
a/requirements-dev.txt +++ b/requirements-dev.txt @@ -4,5 +4,6 @@ django-debug-toolbar django-stubs djlint~=1.32.1 isort~=5.12.0 +lxml-stubs pre-commit pyright==1.1.322 diff --git a/requirements.txt b/requirements.txt index 98d75b85..87127e33 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,8 +1,8 @@ +cachetools dateparser discord.py django~=4.2.4 django-anymail -django-auditlog django-auditlog @ git+https://github.com/jazzband/django-auditlog.git@45591463e8192b4ac0095e259cc4dcea0ac2fd6c django-bleach django-compressor @@ -25,6 +25,7 @@ easy-thumbnails filetype fontawesomefree gunicorn +httpx igdb-api-v4 libsass listparser @@ -41,3 +42,4 @@ rq>=1.12.0 setproctitle tqdm typesense +urlman diff --git a/social/migrations/0007_alter_localactivity_owner.py b/social/migrations/0007_alter_localactivity_owner.py new file mode 100644 index 00000000..f7e3176b --- /dev/null +++ b/social/migrations/0007_alter_localactivity_owner.py @@ -0,0 +1,22 @@ +# Generated by Django 4.2.4 on 2023-08-09 13:26 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("users", "0012_apidentity"), + ("social", "0006_alter_localactivity_template"), + ] + + operations = [ + migrations.AlterField( + model_name="localactivity", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="users.apidentity" + ), + ), + ] diff --git a/social/models.py b/social/models.py index f0e4190e..c89197e8 100644 --- a/social/models.py +++ b/social/models.py @@ -27,7 +27,7 @@ ShelfMember, UserOwnedObjectMixin, ) -from users.models import User +from users.models import APIdentity _logger = logging.getLogger(__name__) @@ -42,10 +42,8 @@ class ActivityTemplate(models.TextChoices): class LocalActivity(models.Model, UserOwnedObjectMixin): - owner = models.ForeignKey(User, on_delete=models.CASCADE) - visibility = models.PositiveSmallIntegerField( - default=0 - ) # 0: Public / 1: Follower 
only / 2: Self only + owner = models.ForeignKey(APIdentity, on_delete=models.CASCADE) # type: ignore + visibility = models.PositiveSmallIntegerField(default=0) # type: ignore template = models.CharField( blank=False, choices=ActivityTemplate.choices, max_length=50 ) @@ -62,11 +60,11 @@ def __str__(self): class ActivityManager: - def __init__(self, user): - self.owner = user + def __init__(self, owner: APIdentity): + self.owner = owner def get_timeline(self, before_time=None): - following = [x for x in self.owner.following if x not in self.owner.ignoring] + following = [x for x in self.owner.following if x not in self.owner.muting] q = Q(owner_id__in=following, visibility__lt=2) | Q(owner=self.owner) if before_time: q = q & Q(created_time__lt=before_time) @@ -205,5 +203,5 @@ def updated(self): super().updated() -def reset_social_visibility_for_user(user: User, visibility: int): - LocalActivity.objects.filter(owner=user).update(visibility=visibility) +def reset_social_visibility_for_user(owner: APIdentity, visibility: int): + LocalActivity.objects.filter(owner=owner).update(visibility=visibility) diff --git a/social/templates/activity/comment_child_item.html b/social/templates/activity/comment_child_item.html index a1d92c68..c048b3d2 100644 --- a/social/templates/activity/comment_child_item.html +++ b/social/templates/activity/comment_child_item.html @@ -53,7 +53,7 @@ {% endif %} - +
diff --git a/social/templates/activity/mark_item.html b/social/templates/activity/mark_item.html index 083ffd2b..8a52f79a 100644 --- a/social/templates/activity/mark_item.html +++ b/social/templates/activity/mark_item.html @@ -40,7 +40,7 @@ {% endif %} - +
diff --git a/social/templates/activity/review_item.html b/social/templates/activity/review_item.html index 277d0a83..3092cd71 100644 --- a/social/templates/activity/review_item.html +++ b/social/templates/activity/review_item.html @@ -33,7 +33,7 @@ {% endif %} - +
diff --git a/social/tests.py b/social/tests.py index 3d6093f2..b881977e 100644 --- a/social/tests.py +++ b/social/tests.py @@ -2,65 +2,86 @@ from catalog.models import * from journal.models import * +from takahe.utils import Takahe from users.models import User from .models import * class SocialTest(TestCase): + databases = "__all__" + def setUp(self): self.book1 = Edition.objects.create(title="Hyperion") self.book2 = Edition.objects.create(title="Andymion") self.movie = Edition.objects.create(title="Fight Club") - self.alice = User.register(mastodon_site="MySpace", mastodon_username="Alice") - self.bob = User.register(mastodon_site="KKCity", mastodon_username="Bob") + self.alice = User.register( + username="Alice", mastodon_site="MySpace", mastodon_username="Alice" + ) + self.bob = User.register( + username="Bob", mastodon_site="KKCity", mastodon_username="Bob" + ) def test_timeline(self): + alice_feed = self.alice.identity.activity_manager + bob_feed = self.bob.identity.activity_manager + # alice see 0 activity in timeline in the beginning - timeline = self.alice.activity_manager.get_timeline() - self.assertEqual(len(timeline), 0) + self.assertEqual(len(alice_feed.get_timeline()), 0) # 1 activity after adding first book to shelf - self.alice.shelf_manager.move_item(self.book1, ShelfType.WISHLIST, visibility=1) - timeline = self.alice.activity_manager.get_timeline() - self.assertEqual(len(timeline), 1) + self.alice.identity.shelf_manager.move_item( + self.book1, ShelfType.WISHLIST, visibility=1 + ) + self.assertEqual(len(alice_feed.get_timeline()), 1) # 2 activities after adding second book to shelf - self.alice.shelf_manager.move_item(self.book2, ShelfType.WISHLIST) - timeline = self.alice.activity_manager.get_timeline() - self.assertEqual(len(timeline), 2) + self.alice.identity.shelf_manager.move_item(self.book2, ShelfType.WISHLIST) + self.assertEqual(len(alice_feed.get_timeline()), 2) # 2 activities after change first mark - 
self.alice.shelf_manager.move_item(self.book1, ShelfType.PROGRESS) - timeline = self.alice.activity_manager.get_timeline() - self.assertEqual(len(timeline), 2) + self.alice.identity.shelf_manager.move_item(self.book1, ShelfType.PROGRESS) + self.assertEqual(len(alice_feed.get_timeline()), 2) # bob see 0 activity in timeline in the beginning - timeline2 = self.bob.activity_manager.get_timeline() - self.assertEqual(len(timeline2), 0) + self.assertEqual(len(bob_feed.get_timeline()), 0) # bob follows alice, see 2 activities - self.bob.mastodon_following = ["Alice@MySpace"] - self.alice.mastodon_follower = ["Bob@KKCity"] - self.bob.merge_relationships() - timeline2 = self.bob.activity_manager.get_timeline() - self.assertEqual(len(timeline2), 2) + self.bob.identity.follow(self.alice.identity) + Takahe._force_state_cycle() + self.assertEqual(len(bob_feed.get_timeline()), 2) + + # bob mute, then unmute alice + self.bob.identity.mute(self.alice.identity) + Takahe._force_state_cycle() + self.assertEqual(len(bob_feed.get_timeline()), 0) + self.bob.identity.unmute(self.alice.identity) + Takahe._force_state_cycle() + self.assertEqual(len(bob_feed.get_timeline()), 2) # alice:3 bob:2 after alice adding second book to shelf as private - self.alice.shelf_manager.move_item(self.movie, ShelfType.WISHLIST, visibility=2) - timeline = self.alice.activity_manager.get_timeline() - self.assertEqual(len(timeline), 3) - timeline2 = self.bob.activity_manager.get_timeline() - self.assertEqual(len(timeline2), 2) - - # remote unfollow - self.bob.mastodon_following = [] - self.alice.mastodon_follower = [] - self.bob.merge_relationships() - timeline = self.bob.activity_manager.get_timeline() - self.assertEqual(len(timeline), 0) - - # local follow - self.bob.follow(self.alice) - timeline = self.bob.activity_manager.get_timeline() - self.assertEqual(len(timeline), 2) + self.alice.identity.shelf_manager.move_item( + self.movie, ShelfType.WISHLIST, visibility=2 + ) + 
self.assertEqual(len(alice_feed.get_timeline()), 3) + self.assertEqual(len(bob_feed.get_timeline()), 2) + + # alice mute bob + self.alice.identity.mute(self.bob.identity) + Takahe._force_state_cycle() + self.assertEqual(len(bob_feed.get_timeline()), 2) + + # bob unfollow alice + self.bob.identity.unfollow(self.alice.identity) + Takahe._force_state_cycle() + self.assertEqual(len(bob_feed.get_timeline()), 0) + + # bob follow alice + self.bob.identity.follow(self.alice.identity) + Takahe._force_state_cycle() + self.assertEqual(len(bob_feed.get_timeline()), 2) + + # alice block bob + self.alice.identity.block(self.bob.identity) + Takahe._force_state_cycle() + self.assertEqual(len(bob_feed.get_timeline()), 0) diff --git a/social/views.py b/social/views.py index 9a68b775..757cb2f4 100644 --- a/social/views.py +++ b/social/views.py @@ -1,7 +1,6 @@ import logging -from django.conf import settings -from django.contrib.auth.decorators import login_required, permission_required +from django.contrib.auth.decorators import login_required from django.core.exceptions import BadRequest from django.shortcuts import render from django.utils.translation import gettext_lazy as _ @@ -65,7 +64,7 @@ def data(request): request, "feed_data.html", { - "activities": ActivityManager(request.user).get_timeline( + "activities": ActivityManager(request.user.identity).get_timeline( before_time=request.GET.get("last") )[:PAGE_SIZE], }, diff --git a/takahe/__init__.py b/takahe/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/takahe/admin.py b/takahe/admin.py new file mode 100644 index 00000000..8c38f3f3 --- /dev/null +++ b/takahe/admin.py @@ -0,0 +1,3 @@ +from django.contrib import admin + +# Register your models here. 
diff --git a/takahe/ap_handlers.py b/takahe/ap_handlers.py new file mode 100644 index 00000000..b4d0ee6a --- /dev/null +++ b/takahe/ap_handlers.py @@ -0,0 +1,123 @@ +from datetime import datetime + +from loguru import logger + +from catalog.common import * +from journal.models import Comment, Piece, Rating, Review, ShelfMember +from users.models import User as NeoUser + +from .models import Follow, Identity, Post +from .utils import Takahe + +_supported_ap_catalog_item_types = [ + "Edition", + "Movie", + "TVShow", + "TVSeason", + "TVEpisode", + "Album", + "Game", + "Podcast", + "Performance", + "PerformanceProduction", +] + +_supported_ap_journal_types = { + "Status": ShelfMember, + "Rating": Rating, + "Comment": Comment, + "Review": Review, +} + + +def _parse_links(objects): + logger.debug(f"Parsing links from {objects}") + items = [] + pieces = [] + for obj in objects: + if obj["type"] in _supported_ap_catalog_item_types: + items.append(obj["url"]) + elif obj["type"] in _supported_ap_journal_types.keys(): + pieces.append(obj) + else: + logger.warning(f'Unknown link type {obj["type"]}') + return items, pieces + + +def _get_or_create_item_by_ap_url(url): + logger.debug(f"Fetching item by ap from {url}") + site = SiteManager.get_site_by_url(url) + if not site: + return None + site.get_resource_ready() + item = site.get_item() + return item + + +def _get_visibility(post_visibility): + match post_visibility: + case 2: + return 1 + case 3: + return 2 + case _: + return 0 + + +def _update_or_create_post(pk, obj): + post = Post.objects.get(pk=pk) + owner = Takahe.get_or_create_apidentity(post.author) + if not post.type_data: + logger.warning(f"Post {post} has no type_data") + return + items, pieces = _parse_links(post.type_data["object"]["relatedWith"]) + logger.info(f"Post {post} has items {items} and pieces {pieces}") + if len(items) == 0: + logger.warning(f"Post {post} has no remote items") + return + elif len(items) > 1: + logger.warning(f"Post {post} has more than 
one remote item") + return + remote_url = items[0] + item = _get_or_create_item_by_ap_url(remote_url) + if not item: + logger.warning(f"Post {post} has no local item") + return + for p in pieces: + cls = _supported_ap_journal_types[p["type"]] + cls.update_by_ap_object(owner, item, p, pk, _get_visibility(post.visibility)) + + +def post_created(pk, obj): + _update_or_create_post(pk, obj) + + +def post_updated(pk, obj): + _update_or_create_post(pk, obj) + + +def post_deleted(pk, obj): + Piece.objects.filter(post_id=pk, local=False).delete() + + +def user_follow_updated(source_identity_pk, target_identity_pk): + u = Takahe.get_local_user_by_identity(source_identity_pk) + # Takahe.update_user_following(u) + logger.info(f"User {u} following updated") + + +def user_mute_updated(source_identity_pk, target_identity_pk): + u = Takahe.get_local_user_by_identity(source_identity_pk) + # Takahe.update_user_muting(u) + logger.info(f"User {u} muting updated") + + +def user_block_updated(source_identity_pk, target_identity_pk): + u = Takahe.get_local_user_by_identity(source_identity_pk) + if u: + # Takahe.update_user_rejecting(u) + logger.info(f"User {u} rejecting updated") + u = Takahe.get_local_user_by_identity(target_identity_pk) + if u: + # Takahe.update_user_rejecting(u) + logger.info(f"User {u} rejecting updated") diff --git a/takahe/apps.py b/takahe/apps.py new file mode 100644 index 00000000..7d39fe99 --- /dev/null +++ b/takahe/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class TakaheConfig(AppConfig): + default_auto_field = "django.db.models.BigAutoField" + name = "takahe" diff --git a/takahe/db_routes.py b/takahe/db_routes.py new file mode 100644 index 00000000..09015105 --- /dev/null +++ b/takahe/db_routes.py @@ -0,0 +1,27 @@ +from django.conf import settings + +_is_testing = "testserver" in settings.ALLOWED_HOSTS + + +class TakaheRouter: + def db_for_read(self, model, **hints): + if model._meta.app_label == "takahe": + return "takahe" + return None + 
+ def db_for_write(self, model, **hints): + if model._meta.app_label == "takahe": + return "takahe" + return None + + def allow_relation(self, obj1, obj2, **hints): + # skip this check but please make sure + # not create relations between takahe models and other apps + if obj1._meta.app_label == "takahe" or obj2._meta.app_label == "takahe": + return obj1._meta.app_label == obj2._meta.app_label + return None + + def allow_migrate(self, db, app_label, model_name=None, **hints): + if app_label == "takahe" or db == "takahe": + return _is_testing and app_label == db + return None diff --git a/takahe/html.py b/takahe/html.py new file mode 100644 index 00000000..c598be2f --- /dev/null +++ b/takahe/html.py @@ -0,0 +1,379 @@ +import html +import re +from html.parser import HTMLParser + +from django.utils.safestring import mark_safe + + +class FediverseHtmlParser(HTMLParser): + """ + A custom HTML parser that only allows a certain tag subset and behaviour: + - br, p tags are passed through + - a tags are passed through if they're not hashtags or mentions + - Another set of tags are converted to p + + It also linkifies URLs, mentions, hashtags, and imagifies emoji. + """ + + REWRITE_TO_P = [ + "p", + "h1", + "h2", + "h3", + "h4", + "h5", + "h6", + "blockquote", + "pre", + "ul", + "ol", + ] + + REWRITE_TO_BR = [ + "br", + "li", + ] + + MENTION_REGEX = re.compile( + r"(^|[^\w\d\-_/])@([\w\d\-_]+(?:@[\w\d\-_\.]+[\w\d\-_]+)?)" + ) + + HASHTAG_REGEX = re.compile(r"\B#([a-zA-Z0-9(_)]+\b)(?!;)") + + EMOJI_REGEX = re.compile(r"\B:([a-zA-Z0-9(_)-]+):\B") + + URL_REGEX = re.compile( + r"""(\(* # Match any opening parentheses. + \b(?"]*)?) 
+ # /path/zz (excluding "unsafe" chars from RFC 1738, + # except for # and ~, which happen in practice) + """, + re.IGNORECASE | re.VERBOSE | re.UNICODE, + ) + + def __init__( + self, + html: str, + uri_domain: str | None = None, + mentions: list | None = None, + find_mentions: bool = False, + find_hashtags: bool = False, + find_emojis: bool = False, + emoji_domain=None, + ): + super().__init__() + self.uri_domain = uri_domain + self.emoji_domain = emoji_domain + self.find_mentions = find_mentions + self.find_hashtags = find_hashtags + self.find_emojis = find_emojis + self.calculate_mentions(mentions) + self._data_buffer = "" + self.html_output = "" + self.text_output = "" + self.emojis: set[str] = set() + self.mentions: set[str] = set() + self.hashtags: set[str] = set() + self._pending_a: dict | None = None + self._fresh_p = False + self.feed(html.replace("\n", "")) + self.flush_data() + + def calculate_mentions(self, mentions: list | None): + """ + Prepares a set of content that we expect to see mentions look like + (this imp) + """ + self.mention_matches: dict[str, str] = {} + self.mention_aliases: dict[str, str] = {} + for mention in mentions or []: + if self.uri_domain: + url = mention.absolute_profile_uri() + else: + url = str(mention.urls.view) + if mention.username: + username = mention.username.lower() + domain = mention.domain_id.lower() + self.mention_matches[f"{username}"] = url + self.mention_matches[f"{username}@{domain}"] = url + self.mention_matches[mention.absolute_profile_uri()] = url + + def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None: + if tag in self.REWRITE_TO_P: + self.flush_data() + self.html_output += "

" + elif tag in self.REWRITE_TO_BR: + self.flush_data() + if not self._fresh_p: + self.html_output += "
" + self.text_output += "\n" + elif tag == "a": + self.flush_data() + self._pending_a = {"attrs": dict(attrs), "content": ""} + self._fresh_p = tag in self.REWRITE_TO_P + + def handle_endtag(self, tag: str) -> None: + self._fresh_p = False + if tag in self.REWRITE_TO_P: + self.flush_data() + self.html_output += "

" + self.text_output += "\n\n" + elif tag == "a": + if self._pending_a: + href = self._pending_a["attrs"].get("href") + content = self._pending_a["content"].strip() + has_ellipsis = "ellipsis" in self._pending_a["attrs"].get("class", "") + # Is it a mention? + if content.lower().lstrip("@") in self.mention_matches: + self.html_output += self.create_mention(content, href) + self.text_output += content + # Is it a hashtag? + elif self.HASHTAG_REGEX.match(content): + self.html_output += self.create_hashtag(content) + self.text_output += content + elif content: + # Shorten the link if we need to + self.html_output += self.create_link( + href, + content, + has_ellipsis=has_ellipsis, + ) + self.text_output += href + self._pending_a = None + + def handle_data(self, data: str) -> None: + self._fresh_p = False + if self._pending_a: + self._pending_a["content"] += data + else: + self._data_buffer += data + + def flush_data(self) -> None: + """ + We collect data segments until we encounter a tag we care about, + so we can treat #hashtag as #hashtag + """ + self.text_output += self._data_buffer + self.html_output += self.linkify(self._data_buffer) + self._data_buffer = "" + + def create_link(self, href, content, has_ellipsis=False): + """ + Generates a link, doing optional shortening. + + All return values from this function should be HTML-safe. + """ + looks_like_link = bool(self.URL_REGEX.match(content)) + if looks_like_link: + protocol, content = content.split("://", 1) + else: + protocol = "" + if (looks_like_link and len(content) > 30) or has_ellipsis: + return f'{html.escape(content[:30])}' + elif looks_like_link: + return f'{html.escape(content)}' + else: + return f'{html.escape(content)}' + + def create_mention(self, handle, href: str | None = None) -> str: + """ + Generates a mention link. Handle should have a leading @. 
+ + All return values from this function should be HTML-safe + """ + handle = handle.lstrip("@") + if "@" in handle: + short_handle = handle.split("@", 1)[0] + else: + short_handle = handle + handle_hash = handle.lower() + short_hash = short_handle.lower() + self.mentions.add(handle_hash) + url = self.mention_matches.get(handle_hash) + # If we have a captured link out, use that as the actual resolver + if href and href in self.mention_matches: + url = self.mention_matches[href] + if url: + if short_hash not in self.mention_aliases: + self.mention_aliases[short_hash] = handle_hash + elif self.mention_aliases.get(short_hash) != handle_hash: + short_handle = handle + return f'@{html.escape(short_handle)}' + else: + return "@" + html.escape(handle) + + def create_hashtag(self, hashtag) -> str: + """ + Generates a hashtag link. Hashtag does not need to start with # + + All return values from this function should be HTML-safe + """ + hashtag = hashtag.lstrip("#") + self.hashtags.add(hashtag.lower()) + if self.uri_domain: + return f'' + else: + return f'' + + def create_emoji(self, shortcode) -> str: + """ + Generates an emoji tag + + All return values from this function should be HTML-safe + """ + from .models import Emoji + + emoji = Emoji.get_by_domain(shortcode, self.emoji_domain) + if emoji and emoji.is_usable: + self.emojis.add(shortcode) + return emoji.as_html() + return f":{shortcode}:" + + def linkify(self, data): + """ + Linkifies some content that is plaintext. + + Handles URLs first, then mentions. Note that this takes great care to + keep track of what is HTML and what needs to be escaped. + """ + # Split the string by the URL regex so we know what to escape and what + # not to escape. + bits = self.URL_REGEX.split(data) + result = "" + # Even indices are data we should pass though, odd indices are links + for i, bit in enumerate(bits): + # A link! 
+ if i % 2 == 1: + result += self.create_link(bit, bit) + # Not a link + elif self.mention_matches or self.find_mentions: + result += self.linkify_mentions(bit) + elif self.find_hashtags: + result += self.linkify_hashtags(bit) + elif self.find_emojis: + result += self.linkify_emoji(bit) + else: + result += html.escape(bit) + return result + + def linkify_mentions(self, data): + """ + Linkifies mentions + """ + bits = self.MENTION_REGEX.split(data) + result = "" + for i, bit in enumerate(bits): + # Mention content + if i % 3 == 2: + result += self.create_mention(bit) + # Not part of a mention (0) or mention preamble (1) + elif self.find_hashtags: + result += self.linkify_hashtags(bit) + elif self.find_emojis: + result += self.linkify_emoji(bit) + else: + result += html.escape(bit) + return result + + def linkify_hashtags(self, data): + """ + Linkifies hashtags + """ + bits = self.HASHTAG_REGEX.split(data) + result = "" + for i, bit in enumerate(bits): + # Not part of a hashtag + if i % 2 == 0: + if self.find_emojis: + result += self.linkify_emoji(bit) + else: + result += html.escape(bit) + # Hashtag content + else: + result += self.create_hashtag(bit) + return result + + def linkify_emoji(self, data): + """ + Linkifies emoji + """ + bits = self.EMOJI_REGEX.split(data) + result = "" + for i, bit in enumerate(bits): + # Not part of an emoji + if i % 2 == 0: + result += html.escape(bit) + # Emoji content + else: + result += self.create_emoji(bit) + return result + + @property + def html(self): + return self.html_output.strip() + + @property + def plain_text(self): + return self.text_output.strip() + + +class ContentRenderer: + """ + Renders HTML for posts, identity fields, and more. + + The `local` parameter affects whether links are absolute (False) or relative (True) + """ + + def __init__(self, local: bool): + self.local = local + + def render_post(self, html: str, post) -> str: + """ + Given post HTML, normalises it and renders it for presentation. 
+ """ + if not html: + return "" + parser = FediverseHtmlParser( + html, + mentions=post.mentions.all(), + uri_domain=(None if self.local else post.author.domain.uri_domain), + find_hashtags=True, + find_emojis=self.local, + emoji_domain=post.author.domain, + ) + return mark_safe(parser.html) + + def render_identity_summary(self, html: str, identity) -> str: + """ + Given identity summary HTML, normalises it and renders it for presentation. + """ + if not html: + return "" + parser = FediverseHtmlParser( + html, + uri_domain=(None if self.local else identity.domain.uri_domain), + find_hashtags=True, + find_emojis=self.local, + emoji_domain=identity.domain, + ) + return mark_safe(parser.html) + + def render_identity_data(self, html: str, identity, strip: bool = False) -> str: + """ + Given name/basic value HTML, normalises it and renders it for presentation. + """ + if not html: + return "" + parser = FediverseHtmlParser( + html, + uri_domain=(None if self.local else identity.domain.uri_domain), + find_hashtags=False, + find_emojis=self.local, + emoji_domain=identity.domain, + ) + if strip: + return mark_safe(parser.html) + else: + return mark_safe(parser.html) diff --git a/takahe/management/commands/takahe.py b/takahe/management/commands/takahe.py new file mode 100644 index 00000000..fc555a28 --- /dev/null +++ b/takahe/management/commands/takahe.py @@ -0,0 +1,42 @@ +from django.conf import settings +from django.core.management.base import BaseCommand +from django.db.models import Count, F +from loguru import logger +from tqdm import tqdm + +from catalog.common import * +from catalog.common.models import * +from catalog.models import * +from journal.models import Tag, update_journal_for_merged_item +from takahe.utils import * +from users.models import User as NeoUser + + +class Command(BaseCommand): + def add_arguments(self, parser): + parser.add_argument( + "--verbose", + action="store_true", + ) + parser.add_argument( + "--sync", + action="store_true", + ) + + def 
sync(self): + logger.info(f"Syncing domain...") + Takahe.get_domain() + logger.info(f"Syncing users...") + for u in tqdm(NeoUser.objects.filter(is_active=True, username__isnull=False)): + Takahe.init_identity_for_local_user(u) + # Takahe.update_user_following(u) + # Takahe.update_user_muting(u) + # Takahe.update_user_rejecting(u) + + def handle(self, *args, **options): + self.verbose = options["verbose"] + + if options["sync"]: + self.sync() + + self.stdout.write(self.style.SUCCESS(f"Done.")) diff --git a/takahe/migrations/0001_initial.py b/takahe/migrations/0001_initial.py new file mode 100644 index 00000000..66193237 --- /dev/null +++ b/takahe/migrations/0001_initial.py @@ -0,0 +1,489 @@ +# Generated by Django 4.2.4 on 2023-08-12 16:48 + +import django.db.models.deletion +import django.utils.timezone +from django.db import migrations, models + +import takahe.models + + +class Migration(migrations.Migration): + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="Domain", + fields=[ + ( + "domain", + models.CharField(max_length=250, primary_key=True, serialize=False), + ), + ( + "service_domain", + models.CharField( + blank=True, + db_index=True, + max_length=250, + null=True, + unique=True, + ), + ), + ("state", models.CharField(default="outdated", max_length=100)), + ("state_changed", models.DateTimeField(auto_now_add=True)), + ("nodeinfo", models.JSONField(blank=True, null=True)), + ("local", models.BooleanField()), + ("blocked", models.BooleanField(default=False)), + ("public", models.BooleanField(default=False)), + ("default", models.BooleanField(default=False)), + ("notes", models.TextField(blank=True, null=True)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ], + options={ + "db_table": "users_domain", + }, + ), + migrations.CreateModel( + name="Emoji", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, 
+ verbose_name="ID", + ), + ), + ("shortcode", models.SlugField(max_length=100)), + ("local", models.BooleanField(default=True)), + ("public", models.BooleanField(null=True)), + ( + "object_uri", + models.CharField( + blank=True, max_length=500, null=True, unique=True + ), + ), + ("mimetype", models.CharField(max_length=200)), + ("file", models.ImageField(blank=True, null=True, upload_to="")), + ("remote_url", models.CharField(blank=True, max_length=500, null=True)), + ("category", models.CharField(blank=True, max_length=100, null=True)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ( + "domain", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="takahe.domain", + ), + ), + ], + options={ + "db_table": "activities_emoji", + }, + ), + migrations.CreateModel( + name="Hashtag", + fields=[ + ( + "hashtag", + models.SlugField(max_length=100, primary_key=True, serialize=False), + ), + ( + "name_override", + models.CharField(blank=True, max_length=100, null=True), + ), + ("public", models.BooleanField(null=True)), + ("state", models.CharField(default="outdated", max_length=100)), + ("state_changed", models.DateTimeField(auto_now_add=True)), + ("stats", models.JSONField(blank=True, null=True)), + ("stats_updated", models.DateTimeField(blank=True, null=True)), + ("aliases", models.JSONField(blank=True, null=True)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ], + options={ + "db_table": "activities_hashtag", + }, + ), + migrations.CreateModel( + name="Identity", + fields=[ + ( + "id", + models.BigIntegerField( + default=takahe.models.Snowflake.generate_identity, + primary_key=True, + serialize=False, + ), + ), + ("actor_uri", models.CharField(max_length=500, unique=True)), + ("state", models.CharField(default="outdated", max_length=100)), + ("state_changed", 
models.DateTimeField(auto_now_add=True)), + ("local", models.BooleanField(db_index=True)), + ("username", models.CharField(blank=True, max_length=500, null=True)), + ("name", models.CharField(blank=True, max_length=500, null=True)), + ("summary", models.TextField(blank=True, null=True)), + ( + "manually_approves_followers", + models.BooleanField(blank=True, null=True), + ), + ("discoverable", models.BooleanField(default=True)), + ( + "profile_uri", + models.CharField(blank=True, max_length=500, null=True), + ), + ("inbox_uri", models.CharField(blank=True, max_length=500, null=True)), + ( + "shared_inbox_uri", + models.CharField(blank=True, max_length=500, null=True), + ), + ("outbox_uri", models.CharField(blank=True, max_length=500, null=True)), + ("icon_uri", models.CharField(blank=True, max_length=500, null=True)), + ("image_uri", models.CharField(blank=True, max_length=500, null=True)), + ( + "followers_uri", + models.CharField(blank=True, max_length=500, null=True), + ), + ( + "following_uri", + models.CharField(blank=True, max_length=500, null=True), + ), + ( + "featured_collection_uri", + models.CharField(blank=True, max_length=500, null=True), + ), + ("actor_type", models.CharField(default="person", max_length=100)), + ("metadata", models.JSONField(blank=True, null=True)), + ("pinned", models.JSONField(blank=True, null=True)), + ("sensitive", models.BooleanField(default=False)), + ( + "restriction", + models.IntegerField( + choices=[(0, "None"), (1, "Limited"), (2, "Blocked")], + db_index=True, + default=0, + ), + ), + ("admin_notes", models.TextField(blank=True, null=True)), + ("private_key", models.TextField(blank=True, null=True)), + ("public_key", models.TextField(blank=True, null=True)), + ("public_key_id", models.TextField(blank=True, null=True)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ("fetched", models.DateTimeField(blank=True, null=True)), + ("deleted", 
models.DateTimeField(blank=True, null=True)), + ( + "domain", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.PROTECT, + related_name="identities", + to="takahe.domain", + ), + ), + ], + options={ + "verbose_name_plural": "identities", + "db_table": "users_identity", + }, + ), + migrations.CreateModel( + name="Post", + fields=[ + ( + "id", + models.BigIntegerField( + default=takahe.models.Snowflake.generate_post, + primary_key=True, + serialize=False, + ), + ), + ("state", models.CharField(default="new", max_length=100)), + ("state_changed", models.DateTimeField(auto_now_add=True)), + ("local", models.BooleanField()), + ( + "object_uri", + models.CharField( + blank=True, max_length=2048, null=True, unique=True + ), + ), + ( + "visibility", + models.IntegerField( + choices=[ + (0, "Public"), + (4, "Local Only"), + (1, "Unlisted"), + (2, "Followers"), + (3, "Mentioned"), + ], + default=0, + ), + ), + ("content", models.TextField()), + ( + "type", + models.CharField( + choices=[ + ("Article", "Article"), + ("Audio", "Audio"), + ("Event", "Event"), + ("Image", "Image"), + ("Note", "Note"), + ("Page", "Page"), + ("Question", "Question"), + ("Video", "Video"), + ], + default="Note", + max_length=20, + ), + ), + ("type_data", models.JSONField(blank=True, null=True)), + ("sensitive", models.BooleanField(default=False)), + ("summary", models.TextField(blank=True, null=True)), + ("url", models.CharField(blank=True, max_length=2048, null=True)), + ( + "in_reply_to", + models.CharField( + blank=True, db_index=True, max_length=500, null=True + ), + ), + ("hashtags", models.JSONField(blank=True, null=True)), + ("stats", models.JSONField(blank=True, null=True)), + ("published", models.DateTimeField(default=django.utils.timezone.now)), + ("edited", models.DateTimeField(blank=True, null=True)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ( + "author", + models.ForeignKey( + 
on_delete=django.db.models.deletion.CASCADE, + related_name="posts", + to="takahe.identity", + ), + ), + ( + "emojis", + models.ManyToManyField( + blank=True, related_name="posts_using_emoji", to="takahe.emoji" + ), + ), + ( + "mentions", + models.ManyToManyField( + blank=True, + related_name="posts_mentioning", + to="takahe.identity", + ), + ), + ( + "to", + models.ManyToManyField( + blank=True, related_name="posts_to", to="takahe.identity" + ), + ), + ], + options={ + "db_table": "activities_post", + }, + ), + migrations.CreateModel( + name="User", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("password", models.CharField(max_length=128, verbose_name="password")), + ( + "last_login", + models.DateTimeField( + blank=True, null=True, verbose_name="last login" + ), + ), + ("email", models.EmailField(max_length=254, unique=True)), + ("admin", models.BooleanField(default=False)), + ("moderator", models.BooleanField(default=False)), + ("banned", models.BooleanField(default=False)), + ("deleted", models.BooleanField(default=False)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ("last_seen", models.DateTimeField(auto_now_add=True)), + ], + options={ + "db_table": "users_user", + }, + ), + migrations.CreateModel( + name="PostInteraction", + fields=[ + ( + "id", + models.BigIntegerField( + default=takahe.models.Snowflake.generate_post_interaction, + primary_key=True, + serialize=False, + ), + ), + ("state", models.CharField(default="new", max_length=100)), + ("state_changed", models.DateTimeField(auto_now_add=True)), + ( + "object_uri", + models.CharField( + blank=True, max_length=500, null=True, unique=True + ), + ), + ( + "type", + models.CharField( + choices=[ + ("like", "Like"), + ("boost", "Boost"), + ("vote", "Vote"), + ("pin", "Pin"), + ], + max_length=100, + ), + ), + ("value", models.CharField(blank=True, 
max_length=50, null=True)), + ("published", models.DateTimeField(default=django.utils.timezone.now)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ( + "identity", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="interactions", + to="takahe.identity", + ), + ), + ( + "post", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="interactions", + to="takahe.post", + ), + ), + ], + options={ + "db_table": "activities_postinteraction", + }, + ), + migrations.AddField( + model_name="identity", + name="users", + field=models.ManyToManyField( + blank=True, related_name="identities", to="takahe.user" + ), + ), + migrations.AddField( + model_name="domain", + name="users", + field=models.ManyToManyField( + blank=True, related_name="domains", to="takahe.user" + ), + ), + migrations.CreateModel( + name="Block", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("state", models.CharField(default="new", max_length=100)), + ("state_changed", models.DateTimeField(auto_now_add=True)), + ("uri", models.CharField(blank=True, max_length=500, null=True)), + ("mute", models.BooleanField()), + ("include_notifications", models.BooleanField(default=False)), + ("expires", models.DateTimeField(blank=True, null=True)), + ("note", models.TextField(blank=True, null=True)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ( + "source", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="outbound_blocks", + to="takahe.identity", + ), + ), + ( + "target", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="inbound_blocks", + to="takahe.identity", + ), + ), + ], + options={ + "db_table": "users_block", + }, + ), + migrations.AlterUniqueTogether( + 
name="identity", + unique_together={("username", "domain")}, + ), + migrations.CreateModel( + name="Follow", + fields=[ + ( + "id", + models.BigIntegerField( + default=takahe.models.Snowflake.generate_follow, + primary_key=True, + serialize=False, + ), + ), + ( + "boosts", + models.BooleanField( + default=True, help_text="Also follow boosts from this user" + ), + ), + ("uri", models.CharField(blank=True, max_length=500, null=True)), + ("note", models.TextField(blank=True, null=True)), + ("state", models.CharField(default="unrequested", max_length=100)), + ("state_changed", models.DateTimeField(auto_now_add=True)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ( + "source", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="outbound_follows", + to="takahe.identity", + ), + ), + ( + "target", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="inbound_follows", + to="takahe.identity", + ), + ), + ], + options={ + "db_table": "users_follow", + "unique_together": {("source", "target")}, + }, + ), + ] diff --git a/takahe/migrations/__init__.py b/takahe/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/takahe/models.py b/takahe/models.py new file mode 100644 index 00000000..c9879c12 --- /dev/null +++ b/takahe/models.py @@ -0,0 +1,1395 @@ +import datetime +import re +import secrets +import ssl +import time +from datetime import date +from functools import cached_property, partial +from typing import TYPE_CHECKING, Literal, Optional +from urllib.parse import urlparse + +import httpx +import urlman +from cachetools import TTLCache, cached +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import padding, rsa +from django.conf import settings +from django.contrib.auth.models import AbstractBaseUser, BaseUserManager +from django.db import models, 
transaction +from django.template.defaultfilters import linebreaks_filter +from django.utils import timezone +from django.utils.safestring import mark_safe +from loguru import logger +from lxml import etree + +from .html import FediverseHtmlParser +from .uris import * + +if TYPE_CHECKING: + from django.db.models.manager import RelatedManager + + +class Snowflake: + """ + Snowflake ID generator and parser. + """ + + # Epoch is 2022/1/1 at midnight, as these are used for _created_ times in our + # own database, not original publish times (which would need an earlier one) + EPOCH = 1641020400 + + TYPE_POST = 0b000 + TYPE_POST_INTERACTION = 0b001 + TYPE_IDENTITY = 0b010 + TYPE_REPORT = 0b011 + TYPE_FOLLOW = 0b100 + + @classmethod + def generate(cls, type_id: int) -> int: + """ + Generates a snowflake-style ID for the given "type". They are designed + to fit inside 63 bits (a signed bigint) + + ID layout is: + * 41 bits of millisecond-level timestamp (enough for EPOCH + 69 years) + * 19 bits of random data (1% chance of clash at 10000 per millisecond) + * 3 bits of type information + + We use random data rather than a sequence ID to try and avoid pushing + this job onto the DB - we may do that in future. If a clash does + occur, the insert will fail and Stator will retry the work for anything + that's coming in remotely, leaving us to just handle that scenario for + our own posts, likes, etc. 
+ """ + # Get the current time in milliseconds + now: int = int((time.time() - cls.EPOCH) * 1000) + # Generate random data + rand_seq: int = secrets.randbits(19) + # Compose them together + return (now << 22) | (rand_seq << 3) | type_id + + @classmethod + def get_type(cls, snowflake: int) -> int: + """ + Returns the type of a given snowflake ID + """ + if snowflake < (1 << 22): + raise ValueError("Not a valid Snowflake ID") + return snowflake & 0b111 + + @classmethod + def get_time(cls, snowflake: int) -> float: + """ + Returns the generation time (in UNIX timestamp seconds) of the ID + """ + if snowflake < (1 << 22): + raise ValueError("Not a valid Snowflake ID") + return ((snowflake >> 22) / 1000) + cls.EPOCH + + # Handy pre-baked methods for django model defaults + @classmethod + def generate_post(cls) -> int: + return cls.generate(cls.TYPE_POST) + + @classmethod + def generate_post_interaction(cls) -> int: + return cls.generate(cls.TYPE_POST_INTERACTION) + + @classmethod + def generate_identity(cls) -> int: + return cls.generate(cls.TYPE_IDENTITY) + + @classmethod + def generate_report(cls) -> int: + return cls.generate(cls.TYPE_REPORT) + + @classmethod + def generate_follow(cls) -> int: + return cls.generate(cls.TYPE_FOLLOW) + + +class RsaKeys: + @classmethod + def generate_keypair(cls) -> tuple[str, str]: + """ + Generates a new RSA keypair + """ + private_key = rsa.generate_private_key( + public_exponent=65537, + key_size=2048, + ) + private_key_serialized = private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption(), + ).decode("ascii") + public_key_serialized = ( + private_key.public_key() + .public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + .decode("ascii") + ) + return private_key_serialized, public_key_serialized + + +class User(AbstractBaseUser): + identities: "RelatedManager[Identity]" 
+ + class Meta: + # managed = False + db_table = "users_user" + + email = models.EmailField(unique=True) + admin = models.BooleanField(default=False) + moderator = models.BooleanField(default=False) + banned = models.BooleanField(default=False) + deleted = models.BooleanField(default=False) + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + last_seen = models.DateTimeField(auto_now_add=True) + USERNAME_FIELD = "email" + EMAIL_FIELD = "email" + REQUIRED_FIELDS: list[str] = [] + + @property + def is_active(self): + return not (self.deleted or self.banned) + + @property + def is_superuser(self): + return self.admin + + @property + def is_staff(self): + return self.admin + + def has_module_perms(self, module): + return self.admin + + def has_perm(self, perm): + return self.admin + + # @cached_property + # def config_user(self) -> Config.UserOptions: + # return Config.load_user(self) + + +class Domain(models.Model): + """ + Represents a domain that a user can have an account on. + + For protocol reasons, if we want to allow custom usernames + per domain, each "display" domain (the one in the handle) must either let + us serve on it directly, or have a "service" domain that maps + to it uniquely that we can serve on that. + + That way, someone coming in with just an Actor URI as their + entrypoint can still try to webfinger preferredUsername@actorDomain + and we can return an appropriate response. + + It's possible to just have one domain do both jobs, of course. + This model also represents _other_ servers' domains, which we treat as + display domains for now, until we start doing better probing. 
+ """ + + domain = models.CharField(max_length=250, primary_key=True) + service_domain = models.CharField( + max_length=250, + null=True, + blank=True, + db_index=True, + unique=True, + ) + + # state = StateField(DomainStates) + state = models.CharField(max_length=100, default="outdated") + state_changed = models.DateTimeField(auto_now_add=True) + + # nodeinfo 2.0 detail about the remote server + nodeinfo = models.JSONField(null=True, blank=True) + + # If we own this domain + local = models.BooleanField() + + # If we have blocked this domain from interacting with us + blocked = models.BooleanField(default=False) + + # Domains can be joinable by any user of the instance (as the default one + # should) + public = models.BooleanField(default=False) + + # If this is the default domain (shown as the default entry for new users) + default = models.BooleanField(default=False) + + # Domains can also be linked to one or more users for their private use + # This should be display domains ONLY + users = models.ManyToManyField("takahe.User", related_name="domains", blank=True) + + # Free-form notes field for admins + notes = models.TextField(blank=True, null=True) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + + class urls(urlman.Urls): + root = "/admin/domains/" + create = "/admin/domains/create/" + edit = "/admin/domains/{self.domain}/" + delete = "{edit}delete/" + root_federation = "/admin/federation/" + edit_federation = "/admin/federation/{self.domain}/" + + class Meta: + # managed = False + db_table = "users_domain" + indexes: list = [] + + @classmethod + def get_remote_domain(cls, domain: str) -> "Domain": + return cls.objects.get_or_create(domain=domain.lower(), local=False)[0] + + @classmethod + def get_domain(cls, domain: str) -> Optional["Domain"]: + try: + return cls.objects.get( + models.Q(domain=domain.lower()) + | models.Q(service_domain=domain.lower()) + ) + except cls.DoesNotExist: + return None + + 
@property + def uri_domain(self) -> str: + if self.service_domain: + return self.service_domain + return self.domain + + @classmethod + def available_for_user(cls, user): + """ + Returns domains that are available for the user to put an identity on + """ + return cls.objects.filter( + models.Q(public=True) | models.Q(users__id=user.id), + local=True, + ).order_by("-default", "domain") + + def __str__(self): + return self.domain + + +class Identity(models.Model): + """ + Represents both local and remote Fediverse identities (actors) + """ + + domain_id: int + + class Restriction(models.IntegerChoices): + none = 0 + limited = 1 + blocked = 2 + + ACTOR_TYPES = ["person", "service", "application", "group", "organization"] + + id = models.BigIntegerField(primary_key=True, default=Snowflake.generate_identity) + + # The Actor URI is essentially also a PK - we keep the default numeric + # one around as well for making nice URLs etc. + actor_uri = models.CharField(max_length=500, unique=True) + + # state = StateField(IdentityStates) + state = models.CharField(max_length=100, default="outdated") + state_changed = models.DateTimeField(auto_now_add=True) + + local = models.BooleanField(db_index=True) + users = models.ManyToManyField( + "takahe.User", + related_name="identities", + blank=True, + ) + + username = models.CharField(max_length=500, blank=True, null=True) + # Must be a display domain if present + domain = models.ForeignKey( + Domain, + blank=True, + null=True, + on_delete=models.PROTECT, + related_name="identities", + ) + + name = models.CharField(max_length=500, blank=True, null=True) + summary = models.TextField(blank=True, null=True) + manually_approves_followers = models.BooleanField(blank=True, null=True) + discoverable = models.BooleanField(default=True) + + profile_uri = models.CharField(max_length=500, blank=True, null=True) + inbox_uri = models.CharField(max_length=500, blank=True, null=True) + shared_inbox_uri = models.CharField(max_length=500, blank=True, 
null=True) + outbox_uri = models.CharField(max_length=500, blank=True, null=True) + icon_uri = models.CharField(max_length=500, blank=True, null=True) + image_uri = models.CharField(max_length=500, blank=True, null=True) + followers_uri = models.CharField(max_length=500, blank=True, null=True) + following_uri = models.CharField(max_length=500, blank=True, null=True) + featured_collection_uri = models.CharField(max_length=500, blank=True, null=True) + actor_type = models.CharField(max_length=100, default="person") + + # icon = models.ImageField( + # upload_to=partial(upload_namer, "profile_images"), blank=True, null=True + # ) + # image = models.ImageField( + # upload_to=partial(upload_namer, "background_images"), blank=True, null=True + # ) + + # Should be a list of {"name":..., "value":...} dicts + metadata = models.JSONField(blank=True, null=True) + + # Should be a list of object URIs (we don't want a full M2M here) + pinned = models.JSONField(blank=True, null=True) + + # Admin-only moderation fields + sensitive = models.BooleanField(default=False) + restriction = models.IntegerField( + choices=Restriction.choices, default=Restriction.none, db_index=True + ) + admin_notes = models.TextField(null=True, blank=True) + + private_key = models.TextField(null=True, blank=True) + public_key = models.TextField(null=True, blank=True) + public_key_id = models.TextField(null=True, blank=True) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + fetched = models.DateTimeField(null=True, blank=True) + deleted = models.DateTimeField(null=True, blank=True) + + # objects = IdentityManager() + + ### Model attributes ### + + class Meta: + # managed = False + db_table = "users_identity" + verbose_name_plural = "identities" + unique_together = [("username", "domain")] + indexes: list = [] # We need this so Stator can add its own + + class urls(urlman.Urls): + view = "/@{self.username}@{self.domain_id}/" + replies = "{view}replies/" + 
settings = "{view}settings/" + action = "{view}action/" + followers = "{view}followers/" + following = "{view}following/" + search = "{view}search/" + activate = "{view}activate/" + admin = "/admin/identities/" + admin_edit = "{admin}{self.pk}/" + djadmin_edit = "/djadmin/users/identity/{self.id}/change/" + + def get_scheme(self, url): + return "https" + + def get_hostname(self, url): + return self.instance.domain.uri_domain + + def __str__(self): + if self.username and self.domain: + return self.handle + return self.actor_uri + + def absolute_profile_uri(self): + """ + Returns a profile URI that is always absolute, for sending out to + other servers. + """ + if self.local: + return f"https://{self.domain.uri_domain}/@{self.username}/" + else: + return self.profile_uri + + @property + def handle(self): + if self.username is None: + return "(unknown user)" + if self.domain_id: + return f"{self.username}@{self.domain_id}" + return f"{self.username}@(unknown server)" + + @property + def user_pk(self): + user = self.users.first() + return user.pk if user else None + + @classmethod + def fetch_webfinger_url(cls, domain: str) -> str: + """ + Given a domain (hostname), returns the correct webfinger URL to use + based on probing host-meta. + """ + with httpx.Client( + timeout=settings.SETUP.REMOTE_TIMEOUT, + headers={"User-Agent": settings.TAKAHE_USER_AGENT}, + ) as client: + try: + response = client.get( + f"https://{domain}/.well-known/host-meta", + follow_redirects=True, + headers={"Accept": "application/xml"}, + ) + + # In the case of anything other than a success, we'll still try + # hitting the webfinger URL on the domain we were given to handle + # incorrectly setup servers. 
+ if response.status_code == 200 and response.content.strip(): + tree = etree.fromstring(response.content) + template = tree.xpath( + "string(.//*[local-name() = 'Link' and @rel='lrdd' and (not(@type) or @type='application/jrd+json')]/@template)" + ) + if template: + return template # type: ignore + except (httpx.RequestError, etree.ParseError): + pass + + return f"https://{domain}/.well-known/webfinger?resource={{uri}}" + + @classmethod + def fetch_webfinger(cls, handle: str) -> tuple[str | None, str | None]: + """ + Given a username@domain handle, returns a tuple of + (actor uri, canonical handle) or None, None if it does not resolve. + """ + domain = handle.split("@")[1].lower() + try: + webfinger_url = cls.fetch_webfinger_url(domain) + except ssl.SSLCertVerificationError: + return None, None + + # Go make a Webfinger request + with httpx.Client( + timeout=settings.SETUP.REMOTE_TIMEOUT, + headers={"User-Agent": settings.TAKAHE_USER_AGENT}, + ) as client: + try: + response = client.get( + webfinger_url.format(uri=f"acct:{handle}"), + follow_redirects=True, + headers={"Accept": "application/json"}, + ) + response.raise_for_status() + except (httpx.HTTPError, ssl.SSLCertVerificationError) as ex: + response = getattr(ex, "response", None) + if ( + response + and response.status_code < 500 + and response.status_code not in [400, 401, 403, 404, 406, 410] + ): + raise ValueError( + f"Client error fetching webfinger: {response.status_code}", + response.content, + ) + return None, None + + try: + data = response.json() + except ValueError: + # Some servers return these with a 200 status code! 
+ if b"not found" in response.content.lower(): + return None, None + raise ValueError( + "JSON parse error fetching webfinger", + response.content, + ) + try: + if data["subject"].startswith("acct:"): + data["subject"] = data["subject"][5:] + for link in data["links"]: + if ( + link.get("type") == "application/activity+json" + and link.get("rel") == "self" + ): + return link["href"], data["subject"] + except KeyError: + # Server returning wrong payload structure + pass + return None, None + + @classmethod + def by_username_and_domain( + cls, + username: str, + domain: str | Domain, + fetch: bool = False, + local: bool = False, + ): + """ + Get an Identity by username and domain. + + When fetch is True, a failed lookup will do a webfinger lookup to attempt to do + a lookup by actor_uri, creating an Identity record if one does not exist. When + local is True, lookups will be restricted to local domains. + + If domain is a Domain, domain.local is used instead of passsed local. + + """ + if username.startswith("@"): + raise ValueError("Username must not start with @") + + domain_instance = None + + if isinstance(domain, Domain): + domain_instance = domain + local = domain.local + domain = domain.domain + else: + domain = domain.lower() + try: + if local: + return cls.objects.get( + username__iexact=username, + domain_id=domain, + local=True, + ) + else: + return cls.objects.get( + username__iexact=username, + domain_id=domain, + ) + except cls.DoesNotExist: + if fetch and not local: + actor_uri, handle = cls.fetch_webfinger(f"{username}@{domain}") + if handle is None: + return None + # See if this actually does match an existing actor + try: + return cls.objects.get(actor_uri=actor_uri) + except cls.DoesNotExist: + pass + # OK, make one + username, domain = handle.split("@") + if not domain_instance: + domain_instance = Domain.get_remote_domain(domain) + return cls.objects.create( + actor_uri=actor_uri, + username=username, + domain_id=domain_instance, + local=False, + 
) + return None + + def generate_keypair(self): + if not self.local: + raise ValueError("Cannot generate keypair for remote user") + self.private_key, self.public_key = RsaKeys.generate_keypair() + self.public_key_id = self.actor_uri + "#main-key" + self.save() + + +class Follow(models.Model): + """ + When one user (the source) follows other (the target) + """ + + id = models.BigIntegerField(primary_key=True, default=Snowflake.generate_follow) + + source = models.ForeignKey( + "takahe.Identity", + on_delete=models.CASCADE, + related_name="outbound_follows", + ) + target = models.ForeignKey( + "takahe.Identity", + on_delete=models.CASCADE, + related_name="inbound_follows", + ) + + boosts = models.BooleanField( + default=True, help_text="Also follow boosts from this user" + ) + + uri = models.CharField(blank=True, null=True, max_length=500) + note = models.TextField(blank=True, null=True) + + # state = StateField(FollowStates) + state = models.CharField(max_length=100, default="unrequested") + state_changed = models.DateTimeField(auto_now_add=True) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + + class Meta: + # managed = False + db_table = "users_follow" + unique_together = [("source", "target")] + indexes: list = [] # We need this so Stator can add its own + + def __str__(self): + return f"#{self.id}: {self.source} → {self.target}" + + +class Post(models.Model): + """ + A post (status, toot) that is either local or remote. 
+ """ + + interactions: "models.QuerySet[PostInteraction]" + + class Visibilities(models.IntegerChoices): + public = 0 + local_only = 4 + unlisted = 1 + followers = 2 + mentioned = 3 + + class Types(models.TextChoices): + article = "Article" + audio = "Audio" + event = "Event" + image = "Image" + note = "Note" + page = "Page" + question = "Question" + video = "Video" + + id = models.BigIntegerField(primary_key=True, default=Snowflake.generate_post) + + # The author (attributedTo) of the post + author = models.ForeignKey( + "takahe.Identity", + on_delete=models.CASCADE, + related_name="posts", + ) + + # The state the post is in + # state = StateField(PostStates) + state = models.CharField(max_length=100, default="new") + state_changed = models.DateTimeField(auto_now_add=True) + + # If it is our post or not + local = models.BooleanField() + + # The canonical object ID + object_uri = models.CharField(max_length=2048, blank=True, null=True, unique=True) + + # Who should be able to see this Post + visibility = models.IntegerField( + choices=Visibilities.choices, + default=Visibilities.public, + ) + + # The main (HTML) content + content = models.TextField() + + type = models.CharField( + max_length=20, + choices=Types.choices, + default=Types.note, + ) + type_data = models.JSONField( + blank=True, + null=True, # , encoder=PostTypeDataEncoder, decoder=PostTypeDataDecoder + ) + + # If the contents of the post are sensitive, and the summary (content + # warning) to show if it is + sensitive = models.BooleanField(default=False) + summary = models.TextField(blank=True, null=True) + + # The public, web URL of this Post on the original server + url = models.CharField(max_length=2048, blank=True, null=True) + + # The Post it is replying to as an AP ID URI + # (as otherwise we'd have to pull entire threads to use IDs) + in_reply_to = models.CharField(max_length=500, blank=True, null=True, db_index=True) + + # The identities the post is directly to (who can see it if not public) + 
to = models.ManyToManyField( + "takahe.Identity", + related_name="posts_to", + blank=True, + ) + + # The identities mentioned in the post + mentions = models.ManyToManyField( + "takahe.Identity", + related_name="posts_mentioning", + blank=True, + ) + + # Hashtags in the post + hashtags = models.JSONField(blank=True, null=True) + + emojis = models.ManyToManyField( + "takahe.Emoji", + related_name="posts_using_emoji", + blank=True, + ) + + # Like/Boost/etc counts + stats = models.JSONField(blank=True, null=True) + + # When the post was originally created (as opposed to when we received it) + published = models.DateTimeField(default=timezone.now) + + # If the post has been edited after initial publication + edited = models.DateTimeField(blank=True, null=True) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + + class Meta: + # managed = False + db_table = "activities_post" + + class urls(urlman.Urls): + view = "{self.author.urls.view}posts/{self.id}/" + object_uri = "{self.author.actor_uri}posts/{self.id}/" + action_like = "{view}like/" + action_unlike = "{view}unlike/" + action_boost = "{view}boost/" + action_unboost = "{view}unboost/" + action_bookmark = "{view}bookmark/" + action_unbookmark = "{view}unbookmark/" + action_delete = "{view}delete/" + action_edit = "{view}edit/" + action_report = "{view}report/" + action_reply = "/compose/?reply_to={self.id}" + admin_edit = "/djadmin/activities/post/{self.id}/change/" + + def get_scheme(self, url): + return "https" + + def get_hostname(self, url): + return self.instance.author.domain.uri_domain + + def __str__(self): + return f"{self.author} #{self.id}" + + def get_absolute_url(self): + return self.urls.view + + def absolute_object_uri(self): + """ + Returns an object URI that is always absolute, for sending out to + other servers. 
+ """ + if self.local: + return self.author.absolute_profile_uri() + f"posts/{self.id}/" + else: + return self.object_uri + + def in_reply_to_post(self) -> Optional["Post"]: + """ + Returns the actual Post object we're replying to, if we can find it + """ + if self.in_reply_to is None: + return None + return ( + Post.objects.filter(object_uri=self.in_reply_to) + .select_related("author") + .first() + ) + + @classmethod + def create_local( + cls, + author: Identity, + raw_prepend_content: str, + content: str, + summary: str | None = None, + sensitive: bool = False, + visibility: int = Visibilities.public, + reply_to: Optional["Post"] = None, + attachments: list | None = None, + type_data: dict | None = None, + published: datetime.datetime | None = None, + ) -> "Post": + with transaction.atomic(): + # Find mentions in this post + mentions = cls.mentions_from_content(content, author) + # mentions = set() + if reply_to: + mentions.add(reply_to.author) + # Maintain local-only for replies + if reply_to.visibility == reply_to.Visibilities.local_only: + visibility = reply_to.Visibilities.local_only + # Find emoji in this post + emojis = Emoji.emojis_from_content(content, None) + # Strip all unwanted HTML and apply linebreaks filter, grabbing hashtags on the way + parser = FediverseHtmlParser(linebreaks_filter(content), find_hashtags=True) + content = parser.html.replace("

", "

" + raw_prepend_content, 1) + hashtags = ( + sorted([tag[: Hashtag.MAXIMUM_LENGTH] for tag in parser.hashtags]) + or None + ) + # Make the Post object + post = cls.objects.create( + author=author, + content=content, + summary=summary or None, + sensitive=bool(summary) or sensitive, + local=True, + visibility=visibility, + hashtags=hashtags, + in_reply_to=reply_to.object_uri if reply_to else None, + ) + post.object_uri = post.urls.object_uri + post.url = post.absolute_object_uri() + post.mentions.set(mentions) + post.emojis.set(emojis) + if published and published < timezone.now(): + post.published = published + if timezone.now() - published > datetime.timedelta(days=2): + post.state = "fanned_out" # add post quietly if it's old + # if attachments:# FIXME + # post.attachments.set(attachments) + # if question: # FIXME + # post.type = question["type"] + # post.type_data = PostTypeData(__root__=question).__root__ + if type_data: + post.type_data = type_data + post.save() + # Recalculate parent stats for replies + if reply_to: + reply_to.calculate_stats() + return post + + def edit_local( + self, + raw_prepend_content: str, + content: str, + summary: str | None = None, + sensitive: bool | None = None, + visibility: int = Visibilities.public, + attachments: list | None = None, + attachment_attributes: list | None = None, + type_data: dict | None = None, + ): + with transaction.atomic(): + # Strip all HTML and apply linebreaks filter + parser = FediverseHtmlParser(linebreaks_filter(content), find_hashtags=True) + self.content = parser.html.replace("

", "

" + raw_prepend_content, 1) + self.hashtags = ( + sorted([tag[: Hashtag.MAXIMUM_LENGTH] for tag in parser.hashtags]) + or None + ) + self.summary = summary or None + self.sensitive = bool(summary) if sensitive is None else sensitive + self.visibility = visibility + self.edited = timezone.now() + self.mentions.set(self.mentions_from_content(content, self.author)) + self.emojis.set(Emoji.emojis_from_content(content, None)) + # self.attachments.set(attachments or []) # fixme + if type_data: + self.type_data = type_data + self.save() + + for attrs in attachment_attributes or []: + attachment = next( + (a for a in attachments or [] if str(a.id) == attrs.id), None + ) + if attachment is None: + continue + attachment.name = attrs.description + attachment.save() + + self.state = "edited" + self.state_changed = timezone.now() + self.state_next_attempt = None + self.state_locked_until = None + self.save() + + @classmethod + def mentions_from_content(cls, content, author) -> set[Identity]: + mention_hits = FediverseHtmlParser(content, find_mentions=True).mentions + mentions = set() + for handle in mention_hits: + handle = handle.lower() + if "@" in handle: + username, domain = handle.split("@", 1) + else: + username = handle + domain = author.domain_id + identity = Identity.by_username_and_domain( + username=username, + domain=domain, + fetch=True, + ) + if identity is not None: + mentions.add(identity) + return mentions + + def ensure_hashtags(self) -> None: + """ + Ensure any of the already parsed hashtags from this Post + have a corresponding Hashtag record. 
+ """ + # Ensure hashtags + if self.hashtags: + for hashtag in self.hashtags: + tag, _ = Hashtag.objects.get_or_create( + hashtag=hashtag[: Hashtag.MAXIMUM_LENGTH], + ) + tag.transition_perform("outdated") + + def calculate_stats(self, save=True): + """ + Recalculates our stats dict + """ + from .models import PostInteraction + + self.stats = { + "likes": self.interactions.filter( + type=PostInteraction.Types.like, + state__in=["new", "fanned_out"], + ).count(), + "boosts": self.interactions.filter( + type=PostInteraction.Types.boost, + state__in=["new", "fanned_out"], + ).count(), + "replies": Post.objects.filter(in_reply_to=self.object_uri).count(), + } + if save: + self.save() + + +class EmojiQuerySet(models.QuerySet): + def usable(self, domain: Domain | None = None): + """ + Returns all usable emoji, optionally filtering by domain too. + """ + visible_q = models.Q(local=True) | models.Q(public=True) + if True: # Config.system.emoji_unreviewed_are_public: + visible_q |= models.Q(public__isnull=True) + qs = self.filter(visible_q) + + if domain: + if not domain.local: + qs = qs.filter(domain=domain) + + return qs + + +class EmojiManager(models.Manager): + def get_queryset(self): + return EmojiQuerySet(self.model, using=self._db) + + def usable(self, domain: Domain | None = None): + return self.get_queryset().usable(domain) + + +class Emoji(models.Model): + class Meta: + # managed = False + db_table = "activities_emoji" + + # Normalized Emoji without the ':' + shortcode = models.SlugField(max_length=100, db_index=True) + + domain = models.ForeignKey( + "takahe.Domain", null=True, blank=True, on_delete=models.CASCADE + ) + local = models.BooleanField(default=True) + + # Should this be shown in the public UI? 
+ public = models.BooleanField(null=True) + + object_uri = models.CharField(max_length=500, blank=True, null=True, unique=True) + + mimetype = models.CharField(max_length=200) + + # Files may not be populated if it's remote and not cached on our side yet + file = models.ImageField( + # upload_to=partial(upload_emoji_namer, "emoji"), + null=True, + blank=True, + ) + + # A link to the custom emoji + remote_url = models.CharField(max_length=500, blank=True, null=True) + + # Used for sorting custom emoji in the picker + category = models.CharField(max_length=100, blank=True, null=True) + + # State of this Emoji + # state = StateField(EmojiStates) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + + objects = EmojiManager() + + @classmethod + def emojis_from_content(cls, content: str, domain: Domain | None) -> list["Emoji"]: + """ + Return a parsed and sanitized of emoji found in content without + the surrounding ':'. + """ + emoji_hits = FediverseHtmlParser( + content, find_emojis=True, emoji_domain=domain + ).emojis + emojis = sorted({emoji for emoji in emoji_hits}) + q = models.Q(local=True) | models.Q(public=True) | models.Q(public__isnull=True) + if domain and not domain.local: + q = q & models.Q(domain=domain) + return list( + cls.objects.filter(local=(domain is None) or domain.local) + .filter(q) + .filter(shortcode__in=emojis) + ) + + @classmethod + @cached(cache=TTLCache(maxsize=1000, ttl=60)) + def get_by_domain(cls, shortcode, domain: Domain | None) -> "Emoji | None": + """ + Given an emoji shortcode and optional domain, looks up the single + emoji and returns it. Raises Emoji.DoesNotExist if there isn't one. 
+ """ + try: + if domain is None or domain.local: + return cls.objects.get(local=True, shortcode=shortcode) + else: + return cls.objects.get(domain=domain, shortcode=shortcode) + except Emoji.DoesNotExist: + return None + + @property + def fullcode(self): + return f":{self.shortcode}:" + + @property + def is_usable(self) -> bool: + """ + Return True if this Emoji is usable. + """ + return self.public or self.public is None + + def full_url(self, always_show=False) -> RelativeAbsoluteUrl: + if self.is_usable or always_show: + if self.file: + return AutoAbsoluteUrl(self.file.url) + elif self.remote_url: + return ProxyAbsoluteUrl( + f"/proxy/emoji/{self.pk}/", + remote_url=self.remote_url, + ) + return StaticAbsoluteUrl("img/blank-emoji-128.png") + + def as_html(self): + if self.is_usable: + return mark_safe( + f'Emoji {self.shortcode}' + ) + return self.fullcode + + +class HashtagQuerySet(models.QuerySet): + def public(self): + public_q = models.Q(public=True) + if True: # Config.system.hashtag_unreviewed_are_public: + public_q |= models.Q(public__isnull=True) + return self.filter(public_q) + + def hashtag_or_alias(self, hashtag: str): + return self.filter( + models.Q(hashtag=hashtag) | models.Q(aliases__contains=hashtag) + ) + + +class HashtagManager(models.Manager): + def get_queryset(self): + return HashtagQuerySet(self.model, using=self._db) + + def public(self): + return self.get_queryset().public() + + def hashtag_or_alias(self, hashtag: str): + return self.get_queryset().hashtag_or_alias(hashtag) + + +class Hashtag(models.Model): + class Meta: + # managed = False + db_table = "activities_hashtag" + + MAXIMUM_LENGTH = 100 + + # Normalized hashtag without the '#' + hashtag = models.SlugField(primary_key=True, max_length=100) + + # Friendly display override + name_override = models.CharField(max_length=100, null=True, blank=True) + + # Should this be shown in the public UI? 
+ public = models.BooleanField(null=True) + + # State of this Hashtag + # state = StateField(HashtagStates) + state = models.CharField(max_length=100, default="outdated") + state_changed = models.DateTimeField(auto_now_add=True) + + # Metrics for this Hashtag + stats = models.JSONField(null=True, blank=True) + # Timestamp of last time the stats were updated + stats_updated = models.DateTimeField(null=True, blank=True) + + # List of other hashtags that are considered similar + aliases = models.JSONField(null=True, blank=True) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + + objects = HashtagManager() + + class urls(urlman.Urls): + view = "/tags/{self.hashtag}/" + follow = "/tags/{self.hashtag}/follow/" + unfollow = "/tags/{self.hashtag}/unfollow/" + admin = "/admin/hashtags/" + admin_edit = "{admin}{self.hashtag}/" + admin_enable = "{admin_edit}enable/" + admin_disable = "{admin_edit}disable/" + timeline = "/tags/{self.hashtag}/" + + hashtag_regex = re.compile(r"\B#([a-zA-Z0-9(_)]+\b)(?!;)") + + def save(self, *args, **kwargs): + self.hashtag = self.hashtag.lstrip("#") + if self.name_override: + self.name_override = self.name_override.lstrip("#") + return super().save(*args, **kwargs) + + @property + def display_name(self): + return self.name_override or self.hashtag + + def __str__(self): + return self.display_name + + def usage_months(self, num: int = 12) -> dict[date, int]: + """ + Return the most recent num months of stats + """ + if not self.stats: + return {} + results = {} + for key, val in self.stats.items(): + parts = key.split("-") + if len(parts) == 2: + year = int(parts[0]) + month = int(parts[1]) + results[date(year, month, 1)] = val + return dict(sorted(results.items(), reverse=True)[:num]) + + def usage_days(self, num: int = 7) -> dict[date, int]: + """ + Return the most recent num days of stats + """ + if not self.stats: + return {} + results = {} + for key, val in self.stats.items(): + parts = 
key.split("-") + if len(parts) == 3: + year = int(parts[0]) + month = int(parts[1]) + day = int(parts[2]) + results[date(year, month, day)] = val + return dict(sorted(results.items(), reverse=True)[:num]) + + def to_mastodon_json(self, following: bool | None = None): + value = { + "name": self.hashtag, + "url": self.urls.view.full(), # type: ignore + "history": [], + } + + if following is not None: + value["following"] = following + + return value + + +class PostInteraction(models.Model): + """ + Handles both boosts and likes + """ + + class Types(models.TextChoices): + like = "like" + boost = "boost" + vote = "vote" + pin = "pin" + + id = models.BigIntegerField( + primary_key=True, + default=Snowflake.generate_post_interaction, + ) + + # The state the boost is in + # state = StateField(PostInteractionStates) + state = models.CharField(max_length=100, default="new") + state_changed = models.DateTimeField(auto_now_add=True) + + # The canonical object ID + object_uri = models.CharField(max_length=500, blank=True, null=True, unique=True) + + # What type of interaction it is + type = models.CharField(max_length=100, choices=Types.choices) + + # The user who boosted/liked/etc. 
+ identity = models.ForeignKey( + "takahe.Identity", + on_delete=models.CASCADE, + related_name="interactions", + ) + + # The post that was boosted/liked/etc + post = models.ForeignKey( + "takahe.Post", + on_delete=models.CASCADE, + related_name="interactions", + ) + + # Used to store any interaction extra text value like the vote + # in the question/poll case + value = models.CharField(max_length=50, blank=True, null=True) + + # When the activity was originally created (as opposed to when we received it) + # Mastodon only seems to send this for boosts, not likes + published = models.DateTimeField(default=timezone.now) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + + class Meta: + # managed = False + db_table = "activities_postinteraction" + + +class Block(models.Model): + """ + When one user (the source) mutes or blocks another (the target) + """ + + # state = StateField(BlockStates) + state = models.CharField(max_length=100, default="new") + state_changed = models.DateTimeField(auto_now_add=True) + + source = models.ForeignKey( + "takahe.Identity", + on_delete=models.CASCADE, + related_name="outbound_blocks", + ) + + target = models.ForeignKey( + "takahe.Identity", + on_delete=models.CASCADE, + related_name="inbound_blocks", + ) + + uri = models.CharField(blank=True, null=True, max_length=500) + + # If it is a mute, we will stop delivering any activities from target to + # source, but we will still deliver activities from source to target. + # A full block (mute=False) stops activities both ways. 
+ mute = models.BooleanField() + include_notifications = models.BooleanField(default=False) + + expires = models.DateTimeField(blank=True, null=True) + note = models.TextField(blank=True, null=True) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + + class Meta: + # managed = False + db_table = "users_block" + + def __str__(self): + return f"#{self.pk}: {self.source} blocks {self.target}" + + ### Alternate fetchers/constructors ### + + @classmethod + def maybe_get( + cls, source, target, mute=False, require_active=False + ) -> Optional["Block"]: + """ + Returns a Block if it exists between source and target + """ + try: + if require_active: + return cls.objects.filter( + status__in=["new", "sent", "awaiting_expiry"] + ).get(source=source, target=target, mute=mute) + else: + return cls.objects.get(source=source, target=target, mute=mute) + except cls.DoesNotExist: + return None + + @classmethod + def create_local_block(cls, source, target) -> "Block": + """ + Creates or updates a full Block from a local Identity to the target + (which can be local or remote). + """ + if not source.local: + raise ValueError("You cannot block from a remote Identity") + block = cls.maybe_get(source=source, target=target, mute=False) + if block is not None: + if not block.state in ["new", "sent", "awaiting_expiry"]: + block.state = BlockStates.new # type:ignore + block.save() + else: + with transaction.atomic(): + block = cls.objects.create( + source=source, + target=target, + mute=False, + ) + block.uri = source.actor_uri + f"block/{block.pk}/" + block.save() + return block + + @classmethod + def create_local_mute( + cls, + source, + target, + duration=None, + include_notifications=False, + ) -> "Block": + """ + Creates or updates a muting Block from a local Identity to the target + (which can be local or remote). 
+ """ + if not source.local: + raise ValueError("You cannot mute from a remote Identity") + block = cls.maybe_get(source=source, target=target, mute=True) + if block is not None: + if not block in ["new", "sent", "awaiting_expiry"]: + block.state = BlockStates.new # type:ignore + if duration: + block.expires = timezone.now() + datetime.timedelta(seconds=duration) + block.include_notifications = include_notifications + block.save() + else: + with transaction.atomic(): + block = cls.objects.create( + source=source, + target=target, + mute=True, + include_notifications=include_notifications, + expires=( + timezone.now() + datetime.timedelta(seconds=duration) + if duration + else None + ), + ) + block.uri = source.actor_uri + f"block/{block.pk}/" + block.save() + return block diff --git a/takahe/tests.py b/takahe/tests.py new file mode 100644 index 00000000..7ce503c2 --- /dev/null +++ b/takahe/tests.py @@ -0,0 +1,3 @@ +from django.test import TestCase + +# Create your tests here. diff --git a/takahe/uris.py b/takahe/uris.py new file mode 100644 index 00000000..e686108b --- /dev/null +++ b/takahe/uris.py @@ -0,0 +1,89 @@ +import hashlib +import sys +from urllib.parse import urljoin + +from django.conf import settings +from django.contrib.staticfiles.storage import staticfiles_storage + + +class RelativeAbsoluteUrl: + """ + Represents a URL that can have both "relative" and "absolute" forms + for various use either locally or remotely. + """ + + absolute: str + relative: str + + def __init__(self, absolute: str, relative: str | None = None): + if "://" not in absolute: + raise ValueError(f"Absolute URL {absolute!r} is not absolute!") + self.absolute = absolute + self.relative = relative or absolute + + +class AutoAbsoluteUrl(RelativeAbsoluteUrl): + """ + Automatically makes the absolute variant by using either settings.MAIN_DOMAIN + or a passed identity's URI domain. 
+ """ + + def __init__( + self, + relative: str, + identity=None, + ): + self.relative = relative + if identity: + absolute_prefix = f"https://{identity.domain.uri_domain}/" + else: + absolute_prefix = f"https://{settings.MAIN_DOMAIN}/" + self.absolute = urljoin(absolute_prefix, self.relative) + + +class ProxyAbsoluteUrl(AutoAbsoluteUrl): + """ + AutoAbsoluteUrl variant for proxy paths, that also attaches a remote URI hash + plus extension to the end if it can. + """ + + def __init__( + self, + relative: str, + identity=None, + remote_url: str | None = None, + ): + if remote_url: + # See if there is a file extension we can grab + extension = "bin" + remote_filename = remote_url.split("/")[-1] + if "." in remote_filename: + extension = remote_filename.split(".")[-1] + # When provided, attach a hash of the remote URL + # SHA1 chosen as it generally has the best performance in modern python, and security is not a concern + # Hash truncation is generally fine, as in the typical use case the hash is scoped to the identity PK. + relative += f"{hashlib.sha1(remote_url.encode('ascii')).hexdigest()[:10]}.{extension}" + super().__init__(relative, identity) + + +class StaticAbsoluteUrl(RelativeAbsoluteUrl): + """ + Creates static URLs given only the static-relative path + """ + + def __init__(self, path: str): + try: + static_url = staticfiles_storage.url(path) + except ValueError: + # Suppress static issues during the first collectstatic + # Yes, I know it's a big hack! 
Pull requests welcome :) + if "collectstatic" in sys.argv: + super().__init__("https://example.com/") + return + raise + if "://" in static_url: + super().__init__(static_url) + else: + super().__init__( + urljoin(f"https://{settings.MAIN_DOMAIN}/", static_url), static_url + ) diff --git a/takahe/utils.py b/takahe/utils.py new file mode 100644 index 00000000..f8498b9d --- /dev/null +++ b/takahe/utils.py @@ -0,0 +1,486 @@ +from typing import TYPE_CHECKING + +from django.conf import settings + +from .models import * + +if TYPE_CHECKING: + from users.models import APIdentity + from users.models import User as NeoUser + + +def _int(s: str): + try: + return int(s) + except: + return -1 + + +def _rating_to_emoji(score: int, star_mode=0): + """convert score(0~10) to mastodon star emoji code""" + if score is None or score == "" or score == 0: + return "" + solid_stars = score // 2 + half_star = int(bool(score % 2)) + empty_stars = 5 - solid_stars if not half_star else 5 - solid_stars - 1 + if star_mode == 1: + emoji_code = "🌕" * solid_stars + "🌗" * half_star + "🌑" * empty_stars + else: + emoji_code = ( + settings.STAR_SOLID * solid_stars + + settings.STAR_HALF * half_star + + settings.STAR_EMPTY * empty_stars + ) + emoji_code = emoji_code.replace("::", ": :") + emoji_code = " " + emoji_code + " " + return emoji_code + + +class Takahe: + Visibilities = Post.Visibilities + + @staticmethod + def get_domain(): + domain = settings.SITE_INFO["site_domain"] + d = Domain.objects.filter(domain=domain).first() + if not d: + logger.info(f"Creating takahe domain {domain}") + d = Domain.objects.create( + domain=domain, + local=True, + service_domain=None, + notes="NeoDB", + nodeinfo=None, + ) + return d + + @staticmethod + def get_node_name_for_domain(d: str): + domain = Domain.objects.filter(domain=d).first() + if domain and domain.nodeinfo: + return domain.nodeinfo.get("metadata", {}).get("nodeName") + + @staticmethod + def init_identity_for_local_user(u: "NeoUser"): + """ + When a 
new local NeoDB user is created, + create a takahe user with the NeoDB user pk, + create a takahe identity, + then create a NeoDB APIdentity with the takahe identity pk. + """ + from users.models import APIdentity + + if not u.username: + logger.warning(f"User {u} has no username") + return None + user = User.objects.filter(pk=u.pk).first() + handler = "@" + u.username + if not user: + logger.info(f"Creating takahe user {u}") + user = User.objects.create(pk=u.pk, email=handler) + else: + if user.email != handler: + logger.warning(f"Updating takahe user {u} email to {handler}") + user.email = handler + user.save() + domain = Domain.objects.get(domain=settings.SITE_INFO["site_domain"]) + identity = Identity.objects.filter(username=u.username, local=True).first() + if not identity: + logger.info(f"Creating takahe identity {u}@{domain}") + identity = Identity.objects.create( + actor_uri=f"https://{domain.uri_domain}/@{u.username}@{domain.domain}/", + username=u.username, + domain=domain, + name=u.username, + local=True, + discoverable=not u.preference.no_anonymous_view, + ) + identity.generate_keypair() + if not user.identities.filter(pk=identity.pk).exists(): + user.identities.add(identity) + apidentity = APIdentity.objects.filter(pk=identity.pk).first() + if not apidentity: + logger.info(f"Creating APIdentity for {identity}") + apidentity = APIdentity.objects.create( + user=u, + id=identity.pk, + local=True, + username=u.username, + domain_name=domain.domain, + deleted=identity.deleted, + ) + elif apidentity.username != identity.username: + logger.warning( + f"Updating APIdentity {apidentity} username to {identity.username}" + ) + apidentity.username = identity.username + apidentity.save() + if u.identity != apidentity: + logger.warning(f"Linking user {u} identity to {apidentity}") + u.identity = apidentity + u.save(update_fields=["identity"]) + return apidentity + + @staticmethod + def get_identity(pk: int): + return Identity.objects.get(pk=pk) + + @staticmethod + 
def get_identity_by_local_user(u: "NeoUser"): + return ( + Identity.objects.filter(pk=u.identity.pk, local=True).first() + if u and u.is_authenticated and u.identity + else None + ) + + @staticmethod + def get_or_create_apidentity(identity: Identity): + from users.models import APIdentity + + apid = APIdentity.objects.filter(pk=identity.pk).first() + if not apid: + if identity.local: + raise ValueError(f"local takahe identity {identity} missing APIdentity") + if not identity.domain: + raise ValueError(f"remote takahe identity {identity} missing domain") + apid = APIdentity.objects.create( + id=identity.pk, + local=False, + username=identity.username, + domain_name=identity.domain.domain, + deleted=identity.deleted, + ) + return apid + + @staticmethod + def get_local_user_by_identity(identity: Identity): + from users.models import User as NeoUser + + return NeoUser.objects.get(identity_id=identity.pk) if identity.local else None + + @staticmethod + def get_following_ids(identity_pk: int): + targets = Follow.objects.filter( + source_id=identity_pk, state="accepted" + ).values_list("target", flat=True) + return list(targets) + + @staticmethod + def get_follower_ids(identity_pk: int): + targets = Follow.objects.filter( + target_id=identity_pk, state="accepted" + ).values_list("target", flat=True) + return list(targets) + + @staticmethod + def get_following_request_ids(identity_pk: int): + targets = Follow.objects.filter( + source_id=identity_pk, state="pending_approval" + ).values_list("target", flat=True) + return list(targets) + + @staticmethod + def get_requested_follower_ids(identity_pk: int): + targets = Follow.objects.filter( + target_id=identity_pk, state="pending_approval" + ).values_list("target", flat=True) + return list(targets) + + @staticmethod + def update_follow_state( + source_pk: int, target_pk: int, from_states: list[str], to_state: str + ): + follow = Follow.objects.filter(source_id=source_pk, target_id=target_pk).first() + if ( + follow + and (not 
from_states or follow.state in from_states) + and follow.state != to_state + ): + follow.state = to_state + follow.save() + return follow + + @staticmethod + def follow(source_pk: int, target_pk: int): + try: + follow = Follow.objects.get(source_id=source_pk, target_id=target_pk) + if follow.state != "accepted": + follow.state = "unrequested" + follow.save() + except Follow.DoesNotExist: + source = Identity.objects.get(pk=source_pk) + follow = Follow.objects.create( + source_id=source_pk, + target_id=target_pk, + boosts=True, + uri="", + state="unrequested", + ) + follow.uri = source.actor_uri + f"follow/{follow.pk}/" + follow.save() + + @staticmethod + def unfollow(source_pk: int, target_pk: int): + Takahe.update_follow_state(source_pk, target_pk, [], "undone") + # InboxMessage.create_internal( + # { + # "type": "ClearTimeline", + # "object": target_identity.pk, + # "actor": self.identity.pk, + # } + # ) + + @staticmethod + def accept_follow_request(source_pk: int, target_pk: int): + Takahe.update_follow_state(source_pk, target_pk, [], "accepting") + + @staticmethod + def reject_follow_request(source_pk: int, target_pk: int): + Takahe.update_follow_state(source_pk, target_pk, [], "rejecting") + + @staticmethod + def get_muting_ids(identity_pk: int) -> list[int]: + targets = Block.objects.filter( + source_id=identity_pk, + mute=True, + state__in=["new", "sent", "awaiting_expiry"], + ).values_list("target", flat=True) + return list(targets) + + @staticmethod + def get_blocking_ids(identity_pk: int) -> list[int]: + targets = Block.objects.filter( + source_id=identity_pk, + mute=False, + state__in=["new", "sent", "awaiting_expiry"], + ).values_list("target", flat=True) + return list(targets) + + @staticmethod + def get_rejecting_ids(identity_pk: int) -> list[int]: + pks1 = Block.objects.filter( + source_id=identity_pk, + mute=False, + state__in=["new", "sent", "awaiting_expiry"], + ).values_list("target", flat=True) + pks2 = Block.objects.filter( + 
target_id=identity_pk, + mute=False, + state__in=["new", "sent", "awaiting_expiry"], + ).values_list("source", flat=True) + return list(set(list(pks1) + list(pks2))) + + @staticmethod + def block_or_mute(source_pk: int, target_pk: int, is_mute: bool): + source = Identity.objects.get(pk=source_pk) + if not source.local: + raise ValueError(f"Cannot block/mute from remote identity {source}") + with transaction.atomic(): + block, _ = Block.objects.update_or_create( + defaults={"state": "new"}, + source_id=source_pk, + target_id=target_pk, + mute=is_mute, + ) + if block.state != "new" or not block.uri: + block.state = "new" + block.uri = source.actor_uri + f"block/{block.pk}/" + block.save() + if not is_mute: + Takahe.unfollow(source_pk, target_pk) + Takahe.reject_follow_request(target_pk, source_pk) + return block + + @staticmethod + def undo_block_or_mute(source_pk: int, target_pk: int, is_mute: bool): + Block.objects.filter( + source_id=source_pk, target_id=target_pk, mute=is_mute + ).update(state="undone") + + @staticmethod + def block(source_pk: int, target_pk: int): + return Takahe.block_or_mute(source_pk, target_pk, False) + + @staticmethod + def unblock(source_pk: int, target_pk: int): + return Takahe.undo_block_or_mute(source_pk, target_pk, False) + + @staticmethod + def mute(source_pk: int, target_pk: int): + return Takahe.block_or_mute(source_pk, target_pk, True) + + @staticmethod + def unmute(source_pk: int, target_pk: int): + return Takahe.undo_block_or_mute(source_pk, target_pk, True) + + @staticmethod + def _force_state_cycle(): # for unit testing only + Follow.objects.filter( + state__in=["rejecting", "undone", "pending_removal"] + ).delete() + Follow.objects.all().update(state="accepted") + Block.objects.filter(state="new").update(state="sent") + Block.objects.exclude(state="sent").delete() + + @staticmethod + def post( + author_pk: int, + pre_conetent: str, + content: str, + visibility: Visibilities, + data: dict | None = None, + post_pk: int | None = 
None, + post_time: datetime.datetime | None = None, + ) -> int | None: + identity = Identity.objects.get(pk=author_pk) + post = ( + Post.objects.filter(author=identity, pk=post_pk).first() + if post_pk + else None + ) + if post: + post.edit_local( + pre_conetent, content, visibility=visibility, type_data=data + ) + else: + post = Post.create_local( + identity, + pre_conetent, + content, + visibility=visibility, + type_data=data, + published=post_time, + ) + return post.pk if post else None + + @staticmethod + def get_post_url(post_pk: int) -> str | None: + post = Post.objects.filter(pk=post_pk).first() if post_pk else None + return post.object_uri if post else None + + @staticmethod + def delete_mark(mark): + if mark.shelfmember and mark.shelfmember.post_id: + Post.objects.filter(pk=mark.shelfmember.post_id).update(state="deleted") + + @staticmethod + def post_mark(mark, share_as_new_post: bool): + from catalog.common import ItemCategory + from takahe.utils import Takahe + + user = mark.owner.user + tags = ( + "\n" + + user.preference.mastodon_append_tag.replace( + "[category]", str(ItemCategory(mark.item.category).label) + ) + if user.preference.mastodon_append_tag + else "" + ) + stars = _rating_to_emoji(mark.rating_grade, 0) + item_link = f"{settings.SITE_INFO['site_url']}/~neodb~{mark.item.url}" + + pre_conetent = ( + f'{mark.action_label}《{mark.item.display_title}》' + ) + content = f"{stars}\n{mark.comment_text or ''}{tags}" + data = { + "object": { + "relatedWith": [mark.item.ap_object_ref, mark.shelfmember.ap_object] + } + } + if mark.comment: + data["object"]["relatedWith"].append(mark.comment.ap_object) + if mark.rating: + data["object"]["relatedWith"].append(mark.rating.ap_object) + if mark.visibility == 1: + v = Takahe.Visibilities.followers + elif mark.visibility == 2: + v = Takahe.Visibilities.mentioned + elif user.preference.mastodon_publish_public: + v = Takahe.Visibilities.public + else: + v = Takahe.Visibilities.unlisted + post_pk = Takahe.post( + 
mark.owner.pk, + pre_conetent, + content, + v, + data, + None if share_as_new_post else mark.shelfmember.post_id, + mark.shelfmember.created_time, + ) + if post_pk != mark.shelfmember.post_id: + mark.shelfmember.post_id = post_pk + mark.shelfmember.save(update_fields=["post_id"]) + if mark.comment and post_pk != mark.comment.post_id: + mark.comment.post_id = post_pk + mark.comment.save(update_fields=["post_id"]) + if mark.rating and post_pk != mark.rating.post_id: + mark.rating.post_id = post_pk + mark.rating.save(update_fields=["post_id"]) + + @staticmethod + def interact_post(post_pk: int, identity_pk: int, type: str): + post = Post.objects.filter(pk=post_pk).first() + if not post: + logger.warning(f"Cannot find post {post_pk}") + return + interaction = PostInteraction.objects.get_or_create( + type=type, + identity_id=identity_pk, + post=post, + )[0] + if interaction.state not in ["new", "fanned_out"]: + interaction.state = "new" + interaction.save() + post.calculate_stats() + return interaction + + @staticmethod + def uninteract_post(post_pk: int, identity_pk: int, type: str): + post = Post.objects.filter(pk=post_pk).first() + if not post: + logger.warning(f"Cannot find post {post_pk}") + return + for interaction in PostInteraction.objects.filter( + type=type, + identity_id=identity_pk, + post=post, + ): + interaction.state = "undone" + interaction.save() + post.calculate_stats() + + @staticmethod + def like_post(post_pk: int, identity_pk: int): + return Takahe.interact_post(post_pk, identity_pk, "like") + + @staticmethod + def unlike_post(post_pk: int, identity_pk: int): + return Takahe.uninteract_post(post_pk, identity_pk, "like") + + @staticmethod + def post_liked_by(post_pk: int, identity_pk: int) -> bool: + interaction = Takahe.get_user_interaction(post_pk, identity_pk, "like") + return interaction is not None and interaction.state in ["new", "fanned_out"] + + @staticmethod + def get_user_interaction(post_pk: int, identity_pk: int, type: str): + post = 
Post.objects.filter(pk=post_pk).first() + if not post: + logger.warning(f"Cannot find post {post_pk}") + return None + return PostInteraction.objects.filter( + type=type, + identity_id=identity_pk, + post=post, + ).first() + + @staticmethod + def get_post_stats(post_pk: int) -> dict: + post = Post.objects.filter(pk=post_pk).first() + if not post: + logger.warning(f"Cannot find post {post_pk}") + return {} + return post.stats or {} diff --git a/takahe/views.py b/takahe/views.py new file mode 100644 index 00000000..91ea44a2 --- /dev/null +++ b/takahe/views.py @@ -0,0 +1,3 @@ +from django.shortcuts import render + +# Create your views here. diff --git a/users/account.py b/users/account.py index 1f6499bb..5a6c1eb4 100644 --- a/users/account.py +++ b/users/account.py @@ -396,6 +396,7 @@ def register(request): ) messages.add_message(request, messages.INFO, _("已发送验证邮件,请查收。")) if username_changed: + request.user.initiatialize() messages.add_message(request, messages.INFO, _("用户名已设置。")) if email_cleared: messages.add_message(request, messages.INFO, _("电子邮件地址已取消关联。")) @@ -480,9 +481,9 @@ def auth_logout(request): def clear_data_task(user_id): user = User.objects.get(pk=user_id) user_str = str(user) - remove_data_by_user(user) + if user.identity: + remove_data_by_user(user.identity) user.clear() - user.save() logger.warning(f"User {user_str} data cleared.") diff --git a/users/management/commands/refresh_following.py b/users/management/commands/refresh_following.py deleted file mode 100644 index c57329d2..00000000 --- a/users/management/commands/refresh_following.py +++ /dev/null @@ -1,21 +0,0 @@ -from datetime import timedelta - -from django.core.management.base import BaseCommand -from django.utils import timezone -from tqdm import tqdm - -from users.models import User - - -class Command(BaseCommand): - help = "Refresh following data for all users" - - def handle(self, *args, **options): - count = 0 - for user in tqdm(User.objects.all()): - user.following = 
user.merged_following_ids() - if user.following: - count += 1 - user.save(update_fields=["following"]) - - print(f"{count} users updated") diff --git a/users/migrations/0012_apidentity.py b/users/migrations/0012_apidentity.py new file mode 100644 index 00000000..ab04b30e --- /dev/null +++ b/users/migrations/0012_apidentity.py @@ -0,0 +1,63 @@ +# Generated by Django 4.2.4 on 2023-08-09 13:37 + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + # replaces = [ + # ("users", "0012_user_local"), + # ("users", "0013_user_identity"), + # ("users", "0014_remove_user_identity_apidentity_user"), + # ("users", "0015_alter_apidentity_user"), + # ] + + dependencies = [ + ("users", "0011_preference_hidden_categories"), + ("takahe", "0001_initial"), + ] + + operations = [ + migrations.CreateModel( + name="APIdentity", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("local", models.BooleanField()), + ("username", models.CharField(blank=True, max_length=500, null=True)), + ( + "domain_name", + models.CharField(blank=True, max_length=500, null=True), + ), + ("deleted", models.DateTimeField(blank=True, null=True)), + ( + "user", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + related_name="identity", + to=settings.AUTH_USER_MODEL, + ), + ), + ], + options={ + "indexes": [ + models.Index( + fields=["local", "username"], + name="users_apide_local_2d8170_idx", + ), + models.Index( + fields=["domain_name", "username"], + name="users_apide_domain__53ffa5_idx", + ), + ], + }, + ), + ] diff --git a/users/migrations/0013_init_identity.py b/users/migrations/0013_init_identity.py new file mode 100644 index 00000000..551faeee --- /dev/null +++ b/users/migrations/0013_init_identity.py @@ -0,0 +1,77 @@ +# Generated by Django 4.2.4 on 2023-08-09 16:54 + +from django.conf import 
settings +from django.db import migrations, models, transaction +from loguru import logger +from tqdm import tqdm + +from takahe.models import Domain as TakaheDomain +from takahe.models import Identity as TakaheIdentity +from takahe.models import User as TakaheUser + +domain = settings.SITE_INFO["site_domain"] +service_domain = settings.SITE_INFO.get("site_service_domain") + + +def init_domain(apps, schema_editor): + d = TakaheDomain.objects.filter(domain=domain).first() + if not d: + logger.info(f"Creating takahe domain {domain}") + TakaheDomain.objects.create( + domain=domain, + local=True, + service_domain=service_domain, + notes="NeoDB", + nodeinfo={}, + ) + else: + logger.info(f"Takahe domain {domain} already exists") + + +def init_identity(apps, schema_editor): + User = apps.get_model("users", "User") + APIdentity = apps.get_model("users", "APIdentity") + tdomain = TakaheDomain.objects.filter(domain=domain).first() + if User.objects.filter(username__isnull=True).exists(): + raise ValueError("null username detected, aborting migration") + if TakaheUser.objects.exists(): + raise ValueError("existing Takahe users detected, aborting migration") + if TakaheIdentity.objects.exists(): + raise ValueError("existing Takahe identities detected, aborting migration") + if APIdentity.objects.exists(): + raise ValueError("existing APIdentity data detected, aborting migration") + logger.info(f"Creating takahe users/identities") + for user in tqdm(User.objects.all()): + username = user.username + handler = "@" + username + identity = APIdentity.objects.create( + pk=user.pk, + user=user, + local=True, + username=username, + domain_name=domain, + deleted=None if user.is_active else user.updated, + ) + takahe_user = TakaheUser.objects.create(pk=user.pk, email=handler) + takahe_identity = TakaheIdentity.objects.create( + pk=user.pk, + actor_uri=f"https://{service_domain or domain}/@{username}@{domain}/", + username=username, + domain=tdomain, + name=username, + local=True, + 
discoverable=not user.preference.no_anonymous_view, + ) + takahe_user.identities.add(takahe_identity) + + +class Migration(migrations.Migration): + + dependencies = [ + ("users", "0012_apidentity"), + ] + + operations = [ + migrations.RunPython(init_domain), + migrations.RunPython(init_identity), + ] diff --git a/users/models/__init__.py b/users/models/__init__.py index d1e45854..14d42a2e 100644 --- a/users/models/__init__.py +++ b/users/models/__init__.py @@ -1,3 +1,4 @@ +from .apidentity import APIdentity from .preference import Preference from .report import Report from .user import User diff --git a/users/models/apidentity.py b/users/models/apidentity.py new file mode 100644 index 00000000..a22c68d2 --- /dev/null +++ b/users/models/apidentity.py @@ -0,0 +1,192 @@ +from functools import cached_property + +from django.conf import settings +from django.db import models +from loguru import logger + +from takahe.utils import Takahe + +from .user import User + + +class APIdentity(models.Model): + """ + An identity/actor in ActivityPub service. 
+ + This model is used as 1:1 mapping to Takahe Identity Model + """ + + user = models.OneToOneField("User", models.CASCADE, related_name="identity") + local = models.BooleanField() + username = models.CharField(max_length=500, blank=True, null=True) + domain_name = models.CharField(max_length=500, blank=True, null=True) + deleted = models.DateTimeField(null=True, blank=True) + + class Meta: + indexes = [ + models.Index(fields=["local", "username"]), + models.Index(fields=["domain_name", "username"]), + ] + + @cached_property + def takahe_identity(self): + return Takahe.get_identity(self.pk) + + @property + def is_active(self): + return self.user.is_active and self.takahe_identity.deleted is None + + @property + def name(self): + return self.takahe_identity.name + + @property + def discoverable(self): + return self.takahe_identity.discoverable + + @property + def actor_uri(self): + return self.takahe_identity.actor_uri + + @property + def icon_uri(self): + return self.takahe_identity.icon_uri + + @property + def display_name(self): + return self.takahe_identity.name + + @property + def avatar(self): + return self.user.avatar # FiXME + + @property + def url(self): + return f"/users/{self.handler}/" + + @property + def preference(self): + return self.user.preference + + @property + def handler(self): + if self.local: + return self.username + else: + return f"{self.username}@{self.domain_name}" + + @property + def following(self): + return Takahe.get_following_ids(self.pk) + + @property + def muting(self): + return Takahe.get_muting_ids(self.pk) + + @property + def blocking(self): + return Takahe.get_blocking_ids(self.pk) + + @property + def rejecting(self): + return Takahe.get_rejecting_ids(self.pk) + + @property + def ignoring(self): + return self.muting + self.rejecting + + def follow(self, target: "APIdentity"): + Takahe.follow(self.pk, target.pk) + + def unfollow(self, target: "APIdentity"): # this also cancels follow request + Takahe.unfollow(self.pk, target.pk) 
+ + def requested_followers(self): + Takahe.get_requested_follower_ids(self.pk) + + def following_request(self): + Takahe.get_following_request_ids(self.pk) + + def accept_follow_request(self, target: "APIdentity"): + Takahe.accept_follow_request(self.pk, target.pk) + + def reject_follow_request(self, target: "APIdentity"): + Takahe.reject_follow_request(self.pk, target.pk) + + def block(self, target: "APIdentity"): + Takahe.block(self.pk, target.pk) + + def unblock(self, target: "APIdentity"): + Takahe.unblock(self.pk, target.pk) + + def mute(self, target: "APIdentity"): + Takahe.mute(self.pk, target.pk) + + def unmute(self, target: "APIdentity"): + Takahe.unmute(self.pk, target.pk) + + def is_rejecting(self, target: "APIdentity"): + return self != target and ( + target.is_blocked_by(self) or target.is_blocking(self) + ) + + def is_blocking(self, target: "APIdentity"): + return target.pk in self.blocking + + def is_blocked_by(self, target: "APIdentity"): + return target.is_blocking(self) + + def is_muting(self, target: "APIdentity"): + return target.pk in self.muting + + def is_following(self, target: "APIdentity"): + return target.pk in self.following + + def is_followed_by(self, target: "APIdentity"): + return target.is_following(self) + + def is_visible_to_user(self, viewing_user: User): + return ( + viewing_user.is_authenticated + or viewing_user == self.user + or ( + not self.is_blocking(viewing_user.identity) + and not self.is_blocked_by(viewing_user.identity) + ) + ) + + @classmethod + def get_by_handler(cls, handler: str) -> "APIdentity": + s = handler.split("@") + if len(s) == 1: + return cls.objects.get(username=s[0], local=True, deleted__isnull=True) + elif len(s) == 2: + return cls.objects.get( + user__mastodon_username=s[0], + user__mastodon_site=s[1], + deleted__isnull=True, + ) + elif len(s) == 3 and s[0] == "": + return cls.objects.get( + username=s[0], domain_name=s[1], local=False, deleted__isnull=True + ) + else: + raise 
cls.DoesNotExist(f"Invalid handler {handler}") + + @cached_property + def activity_manager(self): + from social.models import ActivityManager + + return ActivityManager(self) + + @cached_property + def shelf_manager(self): + from journal.models import ShelfManager + + return ShelfManager(self) + + @cached_property + def tag_manager(self): + from journal.models import TagManager + + return TagManager(self) diff --git a/users/models/preference.py b/users/models/preference.py index 6cc96ef9..ac5a0b45 100644 --- a/users/models/preference.py +++ b/users/models/preference.py @@ -20,6 +20,7 @@ from common.utils import GenerateDateUUIDMediaFilePath from management.models import Announcement from mastodon.api import * +from takahe.utils import Takahe from .user import User diff --git a/users/models/report.py b/users/models/report.py index caabd49c..4a65d2a2 100644 --- a/users/models/report.py +++ b/users/models/report.py @@ -1,24 +1,9 @@ -import hashlib -import re -from functools import cached_property - from django.conf import settings -from django.contrib.auth.models import AbstractUser -from django.core import validators -from django.core.exceptions import ValidationError -from django.core.serializers.json import DjangoJSONEncoder from django.db import models -from django.db.models import F, Q, Value -from django.db.models.functions import Concat, Lower -from django.templatetags.static import static -from django.urls import reverse -from django.utils import timezone -from django.utils.deconstruct import deconstructible from django.utils.translation import gettext_lazy as _ from loguru import logger from common.utils import GenerateDateUUIDMediaFilePath -from management.models import Announcement from mastodon.api import * from .user import User diff --git a/users/models/user.py b/users/models/user.py index 7f77db0c..68a8c322 100644 --- a/users/models/user.py +++ b/users/models/user.py @@ -5,10 +5,9 @@ from django.contrib.auth.models import AbstractUser from 
django.contrib.auth.validators import UnicodeUsernameValidator -from django.core import validators from django.core.exceptions import ValidationError from django.db import models -from django.db.models import F, Q, Value +from django.db.models import F, Manager, Q, Value from django.db.models.functions import Concat, Lower from django.templatetags.static import static from django.urls import reverse @@ -19,8 +18,10 @@ from management.models import Announcement from mastodon.api import * +from takahe.utils import Takahe if TYPE_CHECKING: + from .apidentity import APIdentity from .preference import Preference _RESERVED_USERNAMES = [ @@ -48,6 +49,7 @@ def __call__(self, value): class User(AbstractUser): + identity: "APIdentity" preference: "Preference" username_validator = UsernameValidator() username = models.CharField( @@ -142,15 +144,6 @@ class Meta: ), ] - @staticmethod - def register(**param): - from .preference import Preference - - new_user = User(**param) - new_user.save() - Preference.objects.create(user=new_user) - return new_user - @cached_property def mastodon_acct(self): return ( @@ -185,7 +178,9 @@ def avatar(self): @property def handler(self): - return self.mastodon_acct or self.username or f"~{self.pk}" + return ( + f"{self.username}" if self.username else self.mastodon_acct or f"~{self.pk}" + ) @property def url(self): @@ -194,105 +189,6 @@ def url(self): def __str__(self): return f'{self.pk}:{self.username or ""}:{self.mastodon_acct}' - @property - def ignoring(self): - return self.muting + self.rejecting - - def follow(self, target: "User"): - if ( - target is None - or target.locked - or self.is_following(target) - or self.is_blocking(target) - or self.is_blocked_by(target) - ): - return False - self.local_following.add(target) - self.following.append(target.pk) - self.save(update_fields=["following"]) - return True - - def unfollow(self, target: "User"): - if target and target in self.local_following.all(): - self.local_following.remove(target) - 
if ( - target.pk in self.following - and target.mastodon_acct not in self.mastodon_following - ): - self.following.remove(target.pk) - self.save(update_fields=["following"]) - return True - return False - - def remove_follower(self, target: "User"): - if target is None or self not in target.local_following.all(): - return False - target.local_following.remove(self) - if ( - self.pk in target.following - and self.mastodon_acct not in target.mastodon_following - ): - target.following.remove(self.pk) - target.save(update_fields=["following"]) - return True - - def block(self, target: "User"): - if target is None or target in self.local_blocking.all(): - return False - self.local_blocking.add(target) - if target.pk in self.following: - self.following.remove(target.pk) - self.save(update_fields=["following"]) - if self.pk in target.following: - target.following.remove(self.pk) - target.save(update_fields=["following"]) - if target in self.local_following.all(): - self.local_following.remove(target) - if self in target.local_following.all(): - target.local_following.remove(self) - if target.pk not in self.rejecting: - self.rejecting.append(target.pk) - self.save(update_fields=["rejecting"]) - if self.pk not in target.rejecting: - target.rejecting.append(self.pk) - target.save(update_fields=["rejecting"]) - return True - - def unblock(self, target: "User"): - if target and target in self.local_blocking.all(): - self.local_blocking.remove(target) - if not self.is_blocked_by(target): - if target.pk in self.rejecting: - self.rejecting.remove(target.pk) - self.save(update_fields=["rejecting"]) - if self.pk in target.rejecting: - target.rejecting.remove(self.pk) - target.save(update_fields=["rejecting"]) - return True - return False - - def mute(self, target: "User"): - if ( - target is None - or target in self.local_muting.all() - or target.mastodon_acct in self.mastodon_mutes - ): - return False - self.local_muting.add(target) - if target.pk not in self.muting: - 
self.muting.append(target.pk) - self.save() - return True - - def unmute(self, target: "User"): - if target and target in self.local_muting.all(): - self.local_muting.remove(target) - if target.pk in self.muting: - self.muting.remove(target.pk) - self.save() - return True - return False - def clear(self): if self.mastodon_site == "removed" and not self.is_active: return @@ -313,40 +209,13 @@ def clear(self): self.mastodon_blocks = [] self.mastodon_domain_blocks = [] self.mastodon_account = {} + self.save() + self.identity.deleted = timezone.now() + self.identity.save() - def merge_relationships(self): - self.muting = self.merged_muting_ids() - self.rejecting = self.merged_rejecting_ids() - # caculate following after rejecting is merged - self.following = self.merged_following_ids() - - @classmethod - def merge_rejected_by(cls): - """ - Caculate rejecting field to include blocked by for external users - Should be invoked after invoking merge_relationships() for all users - """ - # FIXME this is quite inifficient, should only invoked in async task - external_users = list( - cls.objects.filter(mastodon_username__isnull=False, is_active=True) - ) - reject_changed = [] - follow_changed = [] - for u in external_users: - for v in external_users: - if v.pk in u.rejecting and u.pk not in v.rejecting: - v.rejecting.append(u.pk) - if v not in reject_changed: - reject_changed.append(v) - if u.pk in v.following: - v.following.remove(u.pk) - if v not in follow_changed: - follow_changed.append(v) - for u in reject_changed: - u.save(update_fields=["rejecting"]) - for u in follow_changed: - u.save(update_fields=["following"]) - return len(follow_changed) + len(reject_changed) + def sync_relationships(self): + # FIXME + pass def refresh_mastodon_data(self): """Try refresh account data from mastodon server, return true if refreshed successfully, note it will not save to db""" @@ -390,112 +259,13 @@ def refresh_mastodon_data(self): self.mastodon_domain_blocks = get_related_acct_list( 
self.mastodon_site, self.mastodon_token, "/api/v1/domain_blocks" ) - self.merge_relationships() + self.sync_relationships() updated = True elif code == 401: logger.error(f"Refresh mastodon data error 401 for {self}") self.mastodon_token = "" return updated - def merged_following_ids(self): - fl = [] - for m in self.mastodon_following: - target = User.get(m) - if target and ( - (not target.mastodon_locked) - or self.mastodon_acct in target.mastodon_followers - ): - fl.append(target.pk) - for user in self.local_following.all(): - if user.pk not in fl and not user.locked and not user.is_blocking(self): - fl.append(user.pk) - fl = [x for x in fl if x not in self.rejecting] - return sorted(fl) - - def merged_muting_ids(self): - external_muting_user_ids = list( - User.objects.all() - .annotate(acct=Concat("mastodon_username", Value("@"), "mastodon_site")) - .filter(acct__in=self.mastodon_mutes) - .values_list("pk", flat=True) - ) - l = list( - set( - external_muting_user_ids - + list(self.local_muting.all().values_list("pk", flat=True)) - ) - ) - return sorted(l) - - def merged_rejecting_ids(self): - domain_blocked_user_ids = list( - User.objects.filter( - mastodon_site__in=self.mastodon_domain_blocks - ).values_list("pk", flat=True) - ) - external_blocking_user_ids = list( - User.objects.all() - .annotate(acct=Concat("mastodon_username", Value("@"), "mastodon_site")) - .filter(acct__in=self.mastodon_blocks) - .values_list("pk", flat=True) - ) - l = list( - set( - domain_blocked_user_ids - + external_blocking_user_ids - + list(self.local_blocking.all().values_list("pk", flat=True)) - + list(self.local_blocked_by.all().values_list("pk", flat=True)) # type: ignore - + list(self.local_muting.all().values_list("pk", flat=True)) - ) - ) - return sorted(l) - - def is_blocking(self, target): - return ( - ( - target in self.local_blocking.all() - or target.mastodon_acct in self.mastodon_blocks - or target.mastodon_site in self.mastodon_domain_blocks - ) - if 
target.is_authenticated - else self.preference.no_anonymous_view - ) - - def is_blocked_by(self, target): - return target.is_authenticated and target.is_blocking(self) - - def is_muting(self, target): - return target.pk in self.muting or target.mastodon_acct in self.mastodon_mutes - - def is_following(self, target): - return ( - self.mastodon_acct in target.mastodon_followers - if target.locked - else target.pk in self.following - # or target.mastodon_acct in self.mastodon_following - # or self.mastodon_acct in target.mastodon_followers - ) - - def is_followed_by(self, target): - return target.is_following(self) - - def get_mark_for_item(self, item): - params = {item.__class__.__name__.lower() + "_id": item.id, "owner": self} - mark = item.mark_class.objects.filter(**params).first() - return mark - - def get_max_visibility(self, viewer): - if not viewer.is_authenticated: - return 0 - elif viewer == self: - return 2 - elif viewer.is_blocked_by(self): - return -1 - elif viewer.is_following(self): - return 1 - else: - return 0 - @property def unread_announcements(self): unread_announcements = Announcement.objects.filter( @@ -503,59 +273,71 @@ def unread_announcements(self): ).order_by("-pk") return unread_announcements + @property + def activity_manager(self): + if not self.identity: + raise ValueError("User has no identity") + return self.identity.activity_manager + + @property + def shelf_manager(self): + if not self.identity: + raise ValueError("User has no identity") + return self.identity.shelf_manager + + @property + def tag_manager(self): + if not self.identity: + raise ValueError("User has no identity") + return self.identity.tag_manager + @classmethod def get(cls, name, case_sensitive=False): if isinstance(name, str): - sp = name.split("@") if name.startswith("~"): try: query_kwargs = {"pk": int(name[1:])} except: return None - elif len(sp) == 1: - query_kwargs = { - "username__iexact" if case_sensitive else "username": name - } - elif len(sp) == 2: + elif 
name.startswith("@"): query_kwargs = { - "mastodon_username__iexact" - if case_sensitive - else "mastodon_username": sp[0], - "mastodon_site__iexact" - if case_sensitive - else "mastodon_site": sp[1], + "username__iexact" if case_sensitive else "username": name[1:] } else: - return None + sp = name.split("@") + if len(sp) == 2: + query_kwargs = { + "mastodon_username__iexact" + if case_sensitive + else "mastodon_username": sp[0], + "mastodon_site__iexact" + if case_sensitive + else "mastodon_site": sp[1], + } + else: + return None elif isinstance(name, int): query_kwargs = {"pk": name} else: return None return User.objects.filter(**query_kwargs).first() - @property - def tags(self): - from journal.models import TagManager - - return TagManager.all_tags_for_user(self) - - @cached_property - def tag_manager(self): - from journal.models import TagManager - - return TagManager.get_manager_for_user(self) + @classmethod + def register(cls, **param): + from .preference import Preference - @cached_property - def shelf_manager(self): - from journal.models import ShelfManager + new_user = cls(**param) + new_user.save() + Preference.objects.create(user=new_user) + if new_user.username: # TODO make username required in registeration + new_user.initialize() + return new_user - return ShelfManager.get_manager_for_user(self) + def initialize(self): + Takahe.init_identity_for_local_user(self) - @cached_property - def activity_manager(self): - from social.models import ActivityManager - return ActivityManager.get_manager_for_user(self) +# TODO the following models should be deprecated soon class Follow(models.Model): diff --git a/users/tasks.py b/users/tasks.py index 06d684a1..c6cb4c8a 100644 --- a/users/tasks.py +++ b/users/tasks.py @@ -42,6 +42,4 @@ def refresh_all_mastodon_data_task(ttl_hours): else: logger.warning(f"Missing token for {user}") logger.info(f"{count} users updated") - c = User.merge_rejected_by() - logger.info(f"{c} users's rejecting list updated") 
logger.info(f"Mastodon data refresh done") diff --git a/users/tests.py b/users/tests.py index 3e801a29..e5513aaf 100644 --- a/users/tests.py +++ b/users/tests.py @@ -1,168 +1,70 @@ from django.test import TestCase +from takahe.utils import Takahe + from .models import * -from .models.user import Block, Follow, Mute class UserTest(TestCase): - def setUp(self): - self.alice = User.register(mastodon_site="MySpace", mastodon_username="Alice") - self.bob = User.register(mastodon_site="KKCity", mastodon_username="Bob") + databases = "__all__" - def test_local_follow(self): - self.assertTrue(self.alice.follow(self.bob)) - self.assertTrue( - Follow.objects.filter(owner=self.alice, target=self.bob).exists() - ) - self.assertEqual(self.alice.merged_following_ids(), [self.bob.pk]) - self.assertEqual(self.alice.following, [self.bob.pk]) + def setUp(self): + self.alice = User.register( + mastodon_site="MySpace", mastodon_username="Alice", username="alice" + ).identity + self.bob = User.register( + mastodon_site="KKCity", mastodon_username="Bob", username="bob" + ).identity + + def test_follow(self): + self.alice.follow(self.bob) + Takahe._force_state_cycle() self.assertTrue(self.alice.is_following(self.bob)) self.assertTrue(self.bob.is_followed_by(self.alice)) - - self.assertFalse(self.alice.follow(self.bob)) - self.assertEqual( - Follow.objects.filter(owner=self.alice, target=self.bob).count(), 1 - ) self.assertEqual(self.alice.following, [self.bob.pk]) - self.assertTrue(self.alice.unfollow(self.bob)) - self.assertFalse( - Follow.objects.filter(owner=self.alice, target=self.bob).exists() - ) + self.alice.unfollow(self.bob) + Takahe._force_state_cycle() self.assertFalse(self.alice.is_following(self.bob)) self.assertFalse(self.bob.is_followed_by(self.alice)) self.assertEqual(self.alice.following, []) - def test_locked(self): - self.bob.mastodon_locked = True - self.bob.save() - self.assertFalse(self.alice.follow(self.bob)) - self.bob.mastodon_locked = False - self.bob.save() - 
self.assertTrue(self.alice.follow(self.bob)) - self.assertTrue(self.alice.is_following(self.bob)) - self.bob.mastodon_locked = True - self.bob.save() - self.assertFalse(self.alice.is_following(self.bob)) - - def test_external_follow(self): - self.alice.mastodon_following.append(self.bob.mastodon_acct) - self.alice.merge_relationships() - self.alice.save() - self.assertTrue(self.alice.is_following(self.bob)) - self.assertEqual(self.alice.following, [self.bob.pk]) - self.assertFalse(self.alice.follow(self.bob)) - - self.alice.mastodon_following.remove(self.bob.mastodon_acct) - self.alice.merge_relationships() - self.alice.save() - self.assertFalse(self.alice.is_following(self.bob)) - self.assertEqual(self.alice.following, []) - self.assertTrue(self.alice.follow(self.bob)) - self.assertTrue(self.alice.is_following(self.bob)) - - def test_local_mute(self): + def test_mute(self): self.alice.mute(self.bob) - self.assertTrue(Mute.objects.filter(owner=self.alice, target=self.bob).exists()) - self.assertEqual(self.alice.merged_muting_ids(), [self.bob.pk]) - self.assertEqual(self.alice.ignoring, [self.bob.pk]) + Takahe._force_state_cycle() self.assertTrue(self.alice.is_muting(self.bob)) - - self.alice.mute(self.bob) - self.assertEqual( - Mute.objects.filter(owner=self.alice, target=self.bob).count(), 1 - ) self.assertEqual(self.alice.ignoring, [self.bob.pk]) + self.assertEqual(self.alice.rejecting, []) - self.alice.unmute(self.bob) - self.assertFalse( - Mute.objects.filter(owner=self.alice, target=self.bob).exists() - ) - self.assertFalse(self.alice.is_muting(self.bob)) - self.assertEqual(self.alice.ignoring, []) - self.assertEqual(self.alice.merged_muting_ids(), []) - - def test_external_mute(self): - self.alice.mastodon_mutes.append(self.bob.mastodon_acct) - self.alice.save() - self.assertTrue(self.alice.is_muting(self.bob)) - self.assertEqual(self.alice.merged_muting_ids(), [self.bob.pk]) - - self.alice.mastodon_mutes.remove(self.bob.mastodon_acct) - 
self.assertFalse(self.alice.is_muting(self.bob)) - self.assertEqual(self.alice.merged_muting_ids(), []) - - def test_local_block_follow(self): - self.alice.block(self.bob) - self.assertEqual(self.bob.follow(self.alice), False) - self.alice.unblock(self.bob) - self.assertEqual(self.bob.follow(self.alice), True) - self.assertEqual(self.bob.following, [self.alice.pk]) + def test_block(self): self.alice.block(self.bob) - self.assertEqual(self.bob.following, []) - - def test_local_block(self): - self.alice.block(self.bob) - self.assertTrue( - Block.objects.filter(owner=self.alice, target=self.bob).exists() - ) - self.assertEqual(self.alice.merged_rejecting_ids(), [self.bob.pk]) - self.assertEqual(self.alice.ignoring, [self.bob.pk]) + Takahe._force_state_cycle() self.assertTrue(self.alice.is_blocking(self.bob)) self.assertTrue(self.bob.is_blocked_by(self.alice)) - - self.alice.block(self.bob) - self.assertEqual( - Block.objects.filter(owner=self.alice, target=self.bob).count(), 1 - ) + self.assertEqual(self.alice.rejecting, [self.bob.pk]) self.assertEqual(self.alice.ignoring, [self.bob.pk]) self.alice.unblock(self.bob) - self.assertFalse( - Block.objects.filter(owner=self.alice, target=self.bob).exists() - ) + Takahe._force_state_cycle() self.assertFalse(self.alice.is_blocking(self.bob)) self.assertFalse(self.bob.is_blocked_by(self.alice)) + self.assertEqual(self.alice.rejecting, []) self.assertEqual(self.alice.ignoring, []) - self.assertEqual(self.alice.merged_rejecting_ids(), []) - - def test_external_block(self): - self.bob.follow(self.alice) - self.assertEqual(self.bob.following, [self.alice.pk]) - self.alice.mastodon_blocks.append(self.bob.mastodon_acct) - self.alice.save() - self.assertTrue(self.alice.is_blocking(self.bob)) - self.assertTrue(self.bob.is_blocked_by(self.alice)) - self.assertEqual(self.alice.merged_rejecting_ids(), [self.bob.pk]) - self.alice.merge_relationships() - self.assertEqual(self.alice.rejecting, [self.bob.pk]) - self.alice.save() - 
self.assertEqual(self.bob.following, [self.alice.pk]) - self.assertEqual(self.bob.rejecting, []) - self.assertEqual(User.merge_rejected_by(), 2) - self.bob.refresh_from_db() - self.assertEqual(self.bob.rejecting, [self.alice.pk]) - self.assertEqual(self.bob.following, []) - - self.alice.mastodon_blocks.remove(self.bob.mastodon_acct) - self.assertFalse(self.alice.is_blocking(self.bob)) - self.assertFalse(self.bob.is_blocked_by(self.alice)) - self.assertEqual(self.alice.merged_rejecting_ids(), []) - def test_external_domain_block(self): - self.alice.mastodon_domain_blocks.append(self.bob.mastodon_site) - self.alice.save() - self.assertTrue(self.alice.is_blocking(self.bob)) - self.assertTrue(self.bob.is_blocked_by(self.alice)) - self.assertEqual(self.alice.merged_rejecting_ids(), [self.bob.pk]) - self.alice.merge_relationships() - self.assertEqual(self.alice.rejecting, [self.bob.pk]) - self.alice.save() - self.assertEqual(User.merge_rejected_by(), 1) - self.bob.refresh_from_db() - self.assertEqual(self.bob.rejecting, [self.alice.pk]) - - self.alice.mastodon_domain_blocks.remove(self.bob.mastodon_site) - self.assertFalse(self.alice.is_blocking(self.bob)) - self.assertFalse(self.bob.is_blocked_by(self.alice)) - self.assertEqual(self.alice.merged_rejecting_ids(), []) + # def test_external_domain_block(self): + # self.alice.mastodon_domain_blocks.append(self.bob.mastodon_site) + # self.alice.save() + # self.assertTrue(self.alice.is_blocking(self.bob)) + # self.assertTrue(self.bob.is_blocked_by(self.alice)) + # self.assertEqual(self.alice.merged_rejecting_ids(), [self.bob.pk]) + # self.alice.merge_relationships() + # self.assertEqual(self.alice.rejecting, [self.bob.pk]) + # self.alice.save() + # self.assertEqual(User.merge_rejected_by(), 1) + # self.bob.refresh_from_db() + # self.assertEqual(self.bob.rejecting, [self.alice.pk]) + + # self.alice.mastodon_domain_blocks.remove(self.bob.mastodon_site) + # self.assertFalse(self.alice.is_blocking(self.bob)) + # 
self.assertFalse(self.bob.is_blocked_by(self.alice)) + # self.assertEqual(self.alice.merged_rejecting_ids(), []) From 31ba8862109b0b5aad61a53d595c5e3b6f297d70 Mon Sep 17 00:00:00 2001 From: Your Name Date: Sun, 13 Aug 2023 00:20:05 -0400 Subject: [PATCH 03/43] cleanup migration --- ..._remove_reply_reply_to_content_and_more.py | 6 +- ...r_collection_featured_by_users_and_more.py | 2 +- .../0015_use_identity_support_remote_piece.py | 155 ++++++++++++++++++ 3 files changed, 160 insertions(+), 3 deletions(-) create mode 100644 journal/migrations/0015_use_identity_support_remote_piece.py diff --git a/journal/migrations/0014_remove_reply_piece_ptr_remove_reply_reply_to_content_and_more.py b/journal/migrations/0014_remove_reply_piece_ptr_remove_reply_reply_to_content_and_more.py index 15006f2d..6c6036a9 100644 --- a/journal/migrations/0014_remove_reply_piece_ptr_remove_reply_reply_to_content_and_more.py +++ b/journal/migrations/0014_remove_reply_piece_ptr_remove_reply_reply_to_content_and_more.py @@ -4,9 +4,11 @@ class Migration(migrations.Migration): - dependencies = [ - ("journal", "0013_remove_comment_focus_item"), + ( + "journal", + "0013_remove_comment_focus_item", + ), ] operations = [ diff --git a/journal/migrations/0015_alter_collection_featured_by_users_and_more.py b/journal/migrations/0015_alter_collection_featured_by_users_and_more.py index ad0a7d72..52726daf 100644 --- a/journal/migrations/0015_alter_collection_featured_by_users_and_more.py +++ b/journal/migrations/0015_alter_collection_featured_by_users_and_more.py @@ -6,7 +6,7 @@ class Migration(migrations.Migration): dependencies = [ - ("users", "0012_apidentity"), + ("users", "0013_init_identity"), ("journal", "0014_alter_piece_options_piece_local_piece_post_id_and_more"), ] diff --git a/journal/migrations/0015_use_identity_support_remote_piece.py b/journal/migrations/0015_use_identity_support_remote_piece.py new file mode 100644 index 00000000..0ed2b9c6 --- /dev/null +++ 
b/journal/migrations/0015_use_identity_support_remote_piece.py @@ -0,0 +1,155 @@ +# Generated by Django 4.2.4 on 2023-08-13 11:39 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + replaces = [ + ("journal", "0014_alter_piece_options_piece_local_piece_post_id_and_more"), + ("journal", "0015_alter_collection_featured_by_users_and_more"), + ] + + dependencies = [ + ( + "journal", + "0014_remove_reply_piece_ptr_remove_reply_reply_to_content_and_more", + ), + ("users", "0013_init_identity"), + ] + + operations = [ + migrations.AlterModelOptions( + name="piece", + options={}, + ), + migrations.AddField( + model_name="piece", + name="local", + field=models.BooleanField(default=True), + ), + migrations.AddField( + model_name="piece", + name="post_id", + field=models.BigIntegerField(default=None, null=True), + ), + migrations.AddField( + model_name="comment", + name="remote_id", + field=models.CharField(default=None, max_length=200, null=True), + ), + migrations.AddField( + model_name="rating", + name="remote_id", + field=models.CharField(default=None, max_length=200, null=True), + ), + migrations.AddField( + model_name="review", + name="remote_id", + field=models.CharField(default=None, max_length=200, null=True), + ), + migrations.AddIndex( + model_name="piece", + index=models.Index( + fields=["post_id"], name="journal_pie_post_id_6a74ff_idx" + ), + ), + migrations.RemoveField( + model_name="collection", + name="featured_by_users", + ), + migrations.AddField( + model_name="collection", + name="featured_by", + field=models.ManyToManyField( + related_name="featured_collections", + through="journal.FeaturedCollection", + to="users.apidentity", + ), + ), + migrations.AlterField( + model_name="collection", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="collectionmember", + name="owner", + 
field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="comment", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="featuredcollection", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="like", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="rating", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="review", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="shelf", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="shelflogentry", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="shelfmember", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="tag", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="tagmember", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + ] From c1ef7b3892aa2f793030d7ad209bba440fb8ea02 Mon Sep 17 00:00:00 2001 From: Your Name Date: Sun, 13 Aug 2023 18:00:10 -0400 Subject: [PATCH 04/43] fetch remote identity 
--- catalog/search/views.py | 20 ++---- catalog/views.py | 1 + common/templates/_header.html | 2 +- common/templates/_sidebar.html | 36 +++++----- common/templatetags/mastodon.py | 12 ++-- common/utils.py | 10 ++- journal/models/like.py | 12 ++-- journal/templates/profile.html | 31 +++++---- journal/views/collection.py | 3 + journal/views/common.py | 15 +++-- journal/views/profile.py | 14 ++-- journal/views/tag.py | 1 + takahe/ap_handlers.py | 2 +- takahe/migrations/0001_initial.py | 20 ++++++ takahe/models.py | 31 +++++++++ takahe/utils.py | 18 ++++- users/migrations/0012_apidentity.py | 3 +- users/migrations/0013_init_identity.py | 2 +- users/models/apidentity.py | 65 +++++++++++++++---- .../users/fetch_identity_failed.html | 4 ++ .../users/fetch_identity_pending.html | 42 ++++++++++++ .../users/fetch_identity_refresh.html | 5 ++ users/templates/users/profile_actions.html | 22 +++---- users/tests.py | 19 ++++++ users/urls.py | 1 + users/views.py | 39 ++++++++++- 26 files changed, 328 insertions(+), 102 deletions(-) create mode 100644 users/templates/users/fetch_identity_failed.html create mode 100644 users/templates/users/fetch_identity_pending.html create mode 100644 users/templates/users/fetch_identity_refresh.html diff --git a/catalog/search/views.py b/catalog/search/views.py index 4af3c393..605fc19c 100644 --- a/catalog/search/views.py +++ b/catalog/search/views.py @@ -1,13 +1,11 @@ -import hashlib import logging -import uuid +import re import django_rq from django.conf import settings from django.contrib.auth.decorators import login_required from django.core.cache import cache from django.core.exceptions import BadRequest -from django.http import HttpResponseRedirect from django.shortcuts import redirect, render from django.utils.translation import gettext_lazy as _ from rq.job import Job @@ -15,7 +13,8 @@ from catalog.common.models import ItemCategory, SiteName from catalog.common.sites import AbstractSite, SiteManager from common.config import 
PAGE_LINK_NUMBER -from common.utils import PageLinksGenerator +from common.utils import HTTPResponseHXRedirect, PageLinksGenerator +from users.views import query_identity from ..models import * from .external import ExternalSources @@ -24,16 +23,7 @@ _logger = logging.getLogger(__name__) -class HTTPResponseHXRedirect(HttpResponseRedirect): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self["HX-Redirect"] = self["Location"] - - status_code = 200 - - def fetch_refresh(request, job_id): - retry = request.GET try: job = Job.fetch(id=job_id, connection=django_rq.get_connection("fetch")) item_url = job.return_value() @@ -102,6 +92,9 @@ def visible_categories(request): def search(request): + keywords = request.GET.get("q", default="").strip() + if re.match(r"^[@@]", keywords): + return query_identity(request, keywords.replace("@", "@")) category = request.GET.get("c", default="all").strip().lower() hide_category = False if category == "all" or not category: @@ -115,7 +108,6 @@ def search(request): hide_category = True except: categories = visible_categories(request) - keywords = request.GET.get("q", default="").strip() tag = request.GET.get("tag", default="").strip() p = request.GET.get("page", default="1") p = int(p) if p.isdigit() else 1 diff --git a/catalog/views.py b/catalog/views.py index a9635c28..4e3bddfb 100644 --- a/catalog/views.py +++ b/catalog/views.py @@ -312,6 +312,7 @@ def discover(request): "discover.html", { "user": user, + "identity": user.identity, "gallery_list": gallery_list, "recent_podcast_episodes": recent_podcast_episodes, "books_in_progress": books_in_progress, diff --git a/common/templates/_header.html b/common/templates/_header.html index b1d33db2..62b7c5c6 100644 --- a/common/templates/_header.html +++ b/common/templates/_header.html @@ -15,7 +15,7 @@ +

+ + + diff --git a/journal/templatetags/user_actions.py b/journal/templatetags/user_actions.py index d1a68b5d..528a553b 100644 --- a/journal/templatetags/user_actions.py +++ b/journal/templatetags/user_actions.py @@ -38,5 +38,15 @@ def liked_piece(context, piece): user and user.is_authenticated and piece.post_id - and Takahe.get_user_interaction(piece.post_id, user, "like") + and Takahe.get_user_interaction(piece.post_id, user.identity.pk, "like") + ) + + +@register.simple_tag(takes_context=True) +def liked_post(context, post): + user = context["request"].user + return ( + user + and user.is_authenticated + and Takahe.post_liked_by(post.pk, user.identity.pk) ) diff --git a/journal/urls.py b/journal/urls.py index a220ff15..215151c2 100644 --- a/journal/urls.py +++ b/journal/urls.py @@ -23,6 +23,11 @@ def _get_all_shelf_types(): path("unlike/", unlike, name="unlike"), path("mark/", mark, name="mark"), path("comment/", comment, name="comment"), + path("piece//replies", piece_replies, name="piece_replies"), + path("post//replies", post_replies, name="post_replies"), + path("post//reply", post_reply, name="post_reply"), + path("post//like", post_like, name="post_like"), + path("post//unlike", post_unlike, name="post_unlike"), path("mark_log//", mark_log, name="mark_log"), path( "add_to_collection/", add_to_collection, name="add_to_collection" diff --git a/journal/views/__init__.py b/journal/views/__init__.py index 759efc54..aa58787f 100644 --- a/journal/views/__init__.py +++ b/journal/views/__init__.py @@ -25,6 +25,7 @@ user_mark_list, wish, ) +from .post import piece_replies, post_like, post_replies, post_reply, post_unlike from .profile import profile, user_calendar_data from .review import ReviewFeed, review_edit, review_retrieve, user_review_list from .tag import user_tag_edit, user_tag_list, user_tag_member_list diff --git a/journal/views/post.py b/journal/views/post.py new file mode 100644 index 00000000..cb9bd0eb --- /dev/null +++ b/journal/views/post.py @@ -0,0 
+1,64 @@ +from django.contrib.auth.decorators import login_required +from django.core.exceptions import BadRequest, ObjectDoesNotExist, PermissionDenied +from django.shortcuts import get_object_or_404, redirect, render +from django.urls import reverse +from django.utils.translation import gettext_lazy as _ +from httpx import request + +from catalog.models import * +from common.utils import ( + AuthedHttpRequest, + PageLinksGenerator, + get_uuid_or_404, + target_identity_required, +) +from takahe.utils import Takahe + +from ..forms import * +from ..models import * + + +@login_required +def piece_replies(request: AuthedHttpRequest, piece_uuid: str): + piece = get_object_or_404(Piece, uid=get_uuid_or_404(piece_uuid)) + if not piece.is_visible_to(request.user): + raise PermissionDenied() + replies = piece.get_replies(request.user.identity) + return render(request, "replies.html", {"post": piece.post, "replies": replies}) + + +@login_required +def post_replies(request: AuthedHttpRequest, post_id: int): + replies = Takahe.get_post_replies(post_id, request.user.identity.pk) + return render( + request, "replies.html", {"post": Takahe.get_post(post_id), "replies": replies} + ) + + +@login_required +def post_reply(request: AuthedHttpRequest, post_id: int): + content = request.POST.get("content", "").strip() + visibility = Takahe.Visibilities(int(request.POST.get("visibility", -1))) + if request.method != "POST" or not content: + raise BadRequest() + Takahe.reply_post(post_id, request.user.identity.pk, content, visibility) + replies = Takahe.get_post_replies(post_id, request.user.identity.pk) + return render( + request, "replies.html", {"post": Takahe.get_post(post_id), "replies": replies} + ) + + +@login_required +def post_like(request: AuthedHttpRequest, post_id: int): + if request.method != "POST": + raise BadRequest() + Takahe.like_post(post_id, request.user.identity.pk) + return render(request, "action_like_post.html", {"post": Takahe.get_post(post_id)}) + + 
+@login_required +def post_unlike(request: AuthedHttpRequest, post_id: int): + if request.method != "POST": + raise BadRequest() + Takahe.unlike_post(post_id, request.user.identity.pk) + return render(request, "action_like_post.html", {"post": Takahe.get_post(post_id)}) diff --git a/takahe/models.py b/takahe/models.py index 5fb05f5f..c5797595 100644 --- a/takahe/models.py +++ b/takahe/models.py @@ -22,7 +22,7 @@ from loguru import logger from lxml import etree -from .html import FediverseHtmlParser +from .html import ContentRenderer, FediverseHtmlParser from .uris import * if TYPE_CHECKING: @@ -419,6 +419,14 @@ def handle(self): return f"{self.username}@{self.domain_id}" return f"{self.username}@(unknown server)" + @property + def url(self): + return ( + f"/users/{self.username}/" + if self.local + else f"/users/@{self.username}@{self.domain_id}/" + ) + @property def user_pk(self): user = self.users.first() @@ -630,6 +638,101 @@ def __str__(self): return f"#{self.id}: {self.source} → {self.target}" +class PostQuerySet(models.QuerySet): + def not_hidden(self): + query = self.exclude(state__in=["deleted", "deleted_fanned_out"]) + return query + + def public(self, include_replies: bool = False): + query = self.filter( + visibility__in=[ + Post.Visibilities.public, + Post.Visibilities.local_only, + ], + ) + if not include_replies: + return query.filter(in_reply_to__isnull=True) + return query + + def local_public(self, include_replies: bool = False): + query = self.filter( + visibility__in=[ + Post.Visibilities.public, + Post.Visibilities.local_only, + ], + local=True, + ) + if not include_replies: + return query.filter(in_reply_to__isnull=True) + return query + + def unlisted(self, include_replies: bool = False): + query = self.filter( + visibility__in=[ + Post.Visibilities.public, + Post.Visibilities.local_only, + Post.Visibilities.unlisted, + ], + ) + if not include_replies: + return query.filter(in_reply_to__isnull=True) + return query + + def visible_to(self, 
identity: Identity | None, include_replies: bool = False): + if identity is None: + return self.unlisted(include_replies=include_replies) + query = self.filter( + models.Q( + visibility__in=[ + Post.Visibilities.public, + Post.Visibilities.local_only, + Post.Visibilities.unlisted, + ] + ) + | models.Q( + visibility=Post.Visibilities.followers, + author__inbound_follows__source=identity, + ) + | models.Q( + mentions=identity, + ) + | models.Q(author=identity) + ).distinct() + if not include_replies: + return query.filter(in_reply_to__isnull=True) + return query + + # def tagged_with(self, hashtag: str | Hashtag): + # if isinstance(hashtag, str): + # tag_q = models.Q(hashtags__contains=hashtag) + # else: + # tag_q = models.Q(hashtags__contains=hashtag.hashtag) + # if hashtag.aliases: + # for alias in hashtag.aliases: + # tag_q |= models.Q(hashtags__contains=alias) + # return self.filter(tag_q) + + +class PostManager(models.Manager): + def get_queryset(self): + return PostQuerySet(self.model, using=self._db) + + def not_hidden(self): + return self.get_queryset().not_hidden() + + def public(self, include_replies: bool = False): + return self.get_queryset().public(include_replies=include_replies) + + def local_public(self, include_replies: bool = False): + return self.get_queryset().local_public(include_replies=include_replies) + + def unlisted(self, include_replies: bool = False): + return self.get_queryset().unlisted(include_replies=include_replies) + + # def tagged_with(self, hashtag: str | Hashtag): + # return self.get_queryset().tagged_with(hashtag=hashtag) + + class Post(models.Model): """ A post (status, toot) that is either local or remote. 
@@ -739,6 +842,7 @@ class Types(models.TextChoices): created = models.DateTimeField(auto_now_add=True) updated = models.DateTimeField(auto_now=True) + objects = PostManager() class Meta: # managed = False @@ -810,7 +914,6 @@ def create_local( with transaction.atomic(): # Find mentions in this post mentions = cls.mentions_from_content(content, author) - # mentions = set() if reply_to: mentions.add(reply_to.author) # Maintain local-only for replies @@ -955,6 +1058,10 @@ def calculate_stats(self, save=True): if save: self.save() + @property + def safe_content_local(self): + return ContentRenderer(local=True).render_post(self.content, self) + class EmojiQuerySet(models.QuerySet): def usable(self, domain: Domain | None = None): @@ -1070,7 +1177,8 @@ def is_usable(self) -> bool: def full_url(self, always_show=False) -> RelativeAbsoluteUrl: if self.is_usable or always_show: if self.file: - return AutoAbsoluteUrl(self.file.url) + return AutoAbsoluteUrl(settings.TAKAHE_MEDIA_PREFIX + self.file.name) + # return AutoAbsoluteUrl(self.file.url) elif self.remote_url: return ProxyAbsoluteUrl( f"/proxy/emoji/{self.pk}/", diff --git a/takahe/utils.py b/takahe/utils.py index 6ec55ca1..82e128f7 100644 --- a/takahe/utils.py +++ b/takahe/utils.py @@ -342,6 +342,7 @@ def post( content: str, visibility: Visibilities, data: dict | None = None, + reply_to_pk: int | None = None, post_pk: int | None = None, post_time: datetime.datetime | None = None, ) -> int | None: @@ -351,6 +352,13 @@ def post( if post_pk else None ) + if post_pk and not post: + raise ValueError(f"Cannot find post to edit: {post_pk}") + reply_to_post = ( + Post.objects.filter(pk=reply_to_pk).first() if reply_to_pk else None + ) + if reply_to_pk and not reply_to_post: + raise ValueError(f"Cannot find post to reply: {reply_to_pk}") if post: post.edit_local( pre_conetent, content, visibility=visibility, type_data=data @@ -363,9 +371,14 @@ def post( visibility=visibility, type_data=data, published=post_time, + 
reply_to=reply_to_post, ) return post.pk if post else None + @staticmethod + def get_post(post_pk: int) -> str | None: + return Post.objects.filter(pk=post_pk).first() + @staticmethod def get_post_url(post_pk: int) -> str | None: post = Post.objects.filter(pk=post_pk).first() if post_pk else None @@ -465,6 +478,12 @@ def uninteract_post(post_pk: int, identity_pk: int, type: str): interaction.save() post.calculate_stats() + @staticmethod + def reply_post( + post_pk: int, identity_pk: int, content: str, visibility: Visibilities + ): + return Takahe.post(identity_pk, "", content, visibility, reply_to_pk=post_pk) + @staticmethod def like_post(post_pk: int, identity_pk: int): return Takahe.interact_post(post_pk, identity_pk, "like") @@ -497,3 +516,33 @@ def get_post_stats(post_pk: int) -> dict: logger.warning(f"Cannot find post {post_pk}") return {} return post.stats or {} + + @staticmethod + def get_post_replies(post_pk: int, identity_pk: int | None): + node = Post.objects.filter(pk=post_pk).first() + if not node: + return Post.objects.none() + identity = ( + Identity.objects.filter(pk=identity_pk).first() if identity_pk else None + ) + child_queryset = ( + Post.objects.not_hidden() + .prefetch_related( + # "attachments", + "mentions", + "emojis", + ) + .select_related( + "author", + "author__domain", + ) + .filter(in_reply_to=node.object_uri) + .order_by("published") + ) + if identity: + child_queryset = child_queryset.visible_to( + identity=identity, include_replies=True + ) + else: + child_queryset = child_queryset.unlisted(include_replies=True) + return child_queryset diff --git a/users/migrations/0013_init_identity.py b/users/migrations/0013_init_identity.py index 40136a90..9729032d 100644 --- a/users/migrations/0013_init_identity.py +++ b/users/migrations/0013_init_identity.py @@ -52,7 +52,9 @@ def init_identity(apps, schema_editor): domain_name=domain, deleted=None if user.is_active else user.updated, ) - takahe_user = TakaheUser.objects.create(pk=user.pk, 
email=handler) + takahe_user = TakaheUser.objects.create( + pk=user.pk, email=handler, admin=user.is_superuser + ) takahe_identity = TakaheIdentity.objects.create( pk=user.pk, actor_uri=f"https://{service_domain or domain}/@{username}@{domain}/", diff --git a/users/models/apidentity.py b/users/models/apidentity.py index 27cc1cd3..c82d14cc 100644 --- a/users/models/apidentity.py +++ b/users/models/apidentity.py @@ -66,17 +66,18 @@ def icon_uri(self): def profile_uri(self): return self.takahe_identity.profile_uri - @property + @cached_property def display_name(self): return self.takahe_identity.name or self.username - @property + @cached_property def summary(self): return self.takahe_identity.summary or "" @property def avatar(self): - return self.takahe_identity.icon_uri or static("img/avatar.svg") # fixme + # return self.takahe_identity.icon_uri or static("img/avatar.svg") # fixme + return f"/proxy/identity_icon/{self.pk}/" @property def url(self): From ef58e00d2be2e04d41827ae94f9804af530c46c6 Mon Sep 17 00:00:00 2001 From: Your Name Date: Tue, 15 Aug 2023 23:46:00 -0400 Subject: [PATCH 09/43] init db with nodename --- takahe/models.py | 47 ++++++++++++++++++++++ users/migrations/0013_init_identity.py | 24 +++++++++++ users/templates/users/profile_actions.html | 2 +- 3 files changed, 72 insertions(+), 1 deletion(-) diff --git a/takahe/models.py b/takahe/models.py index c5797595..5a902907 100644 --- a/takahe/models.py +++ b/takahe/models.py @@ -1532,3 +1532,50 @@ def create_internal(cls, payload): "object": payload, } ) + + +class Config(models.Model): + """ + A configuration setting for either the server or a specific user or identity. + + The possible options and their defaults are defined at the bottom of the file. 
+ """ + + key = models.CharField(max_length=500) + + user = models.ForeignKey( + User, + blank=True, + null=True, + related_name="configs", + on_delete=models.CASCADE, + ) + + identity = models.ForeignKey( + Identity, + blank=True, + null=True, + related_name="configs", + on_delete=models.CASCADE, + ) + + domain = models.ForeignKey( + Domain, + blank=True, + null=True, + related_name="configs", + on_delete=models.CASCADE, + ) + + json = models.JSONField(blank=True, null=True) + image = models.ImageField( + blank=True, + null=True, + ) + + class Meta: + # managed = False + db_table = "core_config" + unique_together = [ + ("key", "user", "identity", "domain"), + ] diff --git a/users/migrations/0013_init_identity.py b/users/migrations/0013_init_identity.py index 9729032d..ba88a9fc 100644 --- a/users/migrations/0013_init_identity.py +++ b/users/migrations/0013_init_identity.py @@ -5,11 +5,13 @@ from loguru import logger from tqdm import tqdm +from takahe.models import Config as TakaheConfig from takahe.models import Domain as TakaheDomain from takahe.models import Identity as TakaheIdentity from takahe.models import User as TakaheUser domain = settings.SITE_INFO["site_domain"] +name = settings.SITE_INFO["site_name"] service_domain = settings.SITE_INFO.get("site_service_domain") @@ -27,6 +29,28 @@ def init_domain(apps, schema_editor): else: logger.info(f"Takahe domain {domain} already exists") + TakaheConfig.objects.update_or_create( + key="public_timeline", + user=None, + identity=None, + domain=None, + defaults={"json": False}, + ) + TakaheConfig.objects.update_or_create( + key="site_name", + user=None, + identity=None, + domain=None, + defaults={"json": name}, + ) + TakaheConfig.objects.update_or_create( + key="site_name", + user=None, + identity=None, + domain=domain, + defaults={"json": name}, + ) + def init_identity(apps, schema_editor): User = apps.get_model("users", "User") diff --git a/users/templates/users/profile_actions.html 
b/users/templates/users/profile_actions.html index 3f7cc7eb..7848d68f 100644 --- a/users/templates/users/profile_actions.html +++ b/users/templates/users/profile_actions.html @@ -28,7 +28,7 @@ + title="用户原始主页"> From 86384792405208c0a3e11e6996eb4b629cebd81d Mon Sep 17 00:00:00 2001 From: Your Name Date: Thu, 17 Aug 2023 18:54:00 -0400 Subject: [PATCH 10/43] add takahe as submodule --- .dockerignore | 10 ++ .gitmodules | 4 + .pre-commit-config.yaml | 1 - Dockerfile | 25 +++-- boofilsic/settings.py | 12 ++- catalog/views.py | 22 ++--- doc/install-docker.md | 104 ++++++++++++++++++++ doc/install.md | 6 +- docker-compose.yml | 130 ++++++++++++++++++++----- misc/neodb-manage | 2 + misc/nginx.conf.d/neodb.conf | 111 ++++++++++++++++++--- misc/takahe-manage | 2 + neodb-takahe | 1 + neodb.env.dist | 6 -- neodb.env.example | 23 +++++ pyproject.toml | 2 +- requirements.txt | 1 - takahe/migrations/0001_initial.py | 51 ++++++++++ takahe/utils.py | 8 +- users/migrations/0001_initial.py | 3 - users/migrations/0013_init_identity.py | 2 +- users/models/apidentity.py | 6 +- 22 files changed, 444 insertions(+), 88 deletions(-) create mode 100644 .dockerignore create mode 100644 .gitmodules create mode 100644 doc/install-docker.md create mode 100755 misc/neodb-manage create mode 100755 misc/takahe-manage create mode 160000 neodb-takahe delete mode 100644 neodb.env.dist create mode 100644 neodb.env.example diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 00000000..f76a0e48 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,10 @@ +.DS_Store +.env +.venv +.vscode +.github +.git +__pycache__ +/doc +/media +/static diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..306daa43 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,4 @@ +[submodule "neodb-takahe"] + path = neodb-takahe + url = git@github.com:alphatownsman/neodb-takahe.git + branch = neodb diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2f55a153..119a5f36 100644 --- 
a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -30,7 +30,6 @@ repos: rev: 22.12.0 hooks: - id: black - language_version: python3.11 - repo: https://github.com/Riverside-Healthcare/djLint rev: v1.32.1 diff --git a/Dockerfile b/Dockerfile index a69d2b72..25decdc1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,10 +1,14 @@ # syntax=docker/dockerfile:1 -FROM python:3.11-slim-bullseye +FROM python:3.11-slim ENV PYTHONDONTWRITEBYTECODE=1 ENV PYTHONUNBUFFERED=1 +RUN useradd -U app COPY . /neodb +RUN mkdir -p /www WORKDIR /neodb -RUN apt-get update \ +RUN mv neodb-takahe /takahe +RUN cp misc/neodb-manage misc/takahe-manage /bin +RUN --mount=type=cache,target=/var/cache/apt apt-get update \ && apt-get install -y --no-install-recommends \ build-essential \ libpq-dev \ @@ -13,10 +17,13 @@ RUN apt-get update \ nginx \ opencc \ git +RUN busybox --install COPY misc/nginx.conf.d/* /etc/nginx/conf.d/ -RUN echo >> /etc/nginx/nginx.conf -RUN echo 'daemon off;' >> /etc/nginx/nginx.conf -RUN python3 -m pip install --no-cache-dir --upgrade -r requirements.txt + +RUN --mount=type=cache,target=/root/.cache python3 -m pip install --upgrade -r requirements.txt + +RUN --mount=type=cache,target=/root/.cache cd /takahe && python3 -m pip install --upgrade -r requirements.txt + RUN apt-get purge -y --auto-remove \ build-essential \ libpq-dev \ @@ -24,8 +31,10 @@ RUN apt-get purge -y --auto-remove \ RUN python3 manage.py compilescss \ && python3 manage.py collectstatic --noinput -RUN cp -R misc/www /www -RUN mv static /www/s + +RUN cd /takahe && TAKAHE_DATABASE_SERVER="postgres://x@y/z" TAKAHE_SECRET_KEY="t" TAKAHE_MAIN_DOMAIN="x.y" python3 manage.py collectstatic --noinput + +USER app:app # invoke check by default -CMD [ "python3", "/neodb/manage.py", "check" ] +CMD [ "sh", "-c", 'python3 /neodb/manage.py check && TAKAHE_DATABASE_SERVER="postgres://x@y/z" TAKAHE_SECRET_KEY="t" TAKAHE_MAIN_DOMAIN="x.y" python3 manage.py collectstatic --noinput python3 /takahe/manage.py check' ] diff 
--git a/boofilsic/settings.py b/boofilsic/settings.py index 3f836c1f..f5424cbd 100644 --- a/boofilsic/settings.py +++ b/boofilsic/settings.py @@ -56,7 +56,6 @@ "polymorphic", "easy_thumbnails", "user_messages", - "fontawesomefree", # "anymail", # "silk", ] @@ -130,9 +129,9 @@ DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql", - "NAME": os.environ.get("NEODB_DB_NAME", "test"), - "USER": os.environ.get("NEODB_DB_USER", "postgres"), - "PASSWORD": os.environ.get("NEODB_DB_PASSWORD", "admin123"), + "NAME": os.environ.get("NEODB_DB_NAME", "test_neodb"), + "USER": os.environ.get("NEODB_DB_USER", "testuser"), + "PASSWORD": os.environ.get("NEODB_DB_PASSWORD", "testpass"), "HOST": os.environ.get("NEODB_DB_HOST", "127.0.0.1"), "PORT": int(os.environ.get("NEODB_DB_PORT", 5432)), "OPTIONS": { @@ -191,7 +190,6 @@ DATA_UPLOAD_MAX_MEMORY_SIZE = 100 * 1024 * 1024 CSRF_COOKIE_SECURE = True SESSION_COOKIE_SECURE = True - if os.getenv("NEODB_SSL", "") != "": SECURE_SSL_REDIRECT = True SECURE_HSTS_PRELOAD = True @@ -238,6 +236,10 @@ # for sites migrated from previous version, either wipe mastodon client ids or use: # REDIRECT_URIS = f'{SITE_INFO["site_url"]}/users/OAuth2_login/' +CSRF_TRUSTED_ORIGINS = [SITE_INFO["site_url"]] +if DEBUG: + CSRF_TRUSTED_ORIGINS += ["http://127.0.0.1:8000", "http://localhost:8000"] + # Path to save report related images, ends with slash REPORT_MEDIA_PATH_ROOT = "report/" MARKDOWNX_MEDIA_PATH = "review/" diff --git a/catalog/views.py b/catalog/views.py index 4e3bddfb..1e65e0cc 100644 --- a/catalog/views.py +++ b/catalog/views.py @@ -19,7 +19,6 @@ ShelfMember, ShelfType, ShelfTypeNames, - q_item_in_category, q_piece_in_home_feed_of_user, q_piece_visible_to_user, ) @@ -259,12 +258,6 @@ def reviews(request, item_path, item_uuid): def discover(request): if request.method != "GET": raise BadRequest() - user = request.user - if user.is_authenticated: - layout = user.preference.discover_layout - else: - layout = [] - cache_key = 
"public_gallery" gallery_list = cache.get(cache_key, []) @@ -276,10 +269,12 @@ def discover(request): # ) # gallery["items"] = Item.objects.filter(id__in=ids) - if user.is_authenticated: + if request.user.is_authenticated: + layout = request.user.preference.discover_layout + identity = request.user.identity podcast_ids = [ p.item_id - for p in user.shelf_manager.get_latest_members( + for p in identity.shelf_manager.get_latest_members( ShelfType.PROGRESS, ItemCategory.Podcast ) ] @@ -289,7 +284,7 @@ def discover(request): books_in_progress = Edition.objects.filter( id__in=[ p.item_id - for p in user.shelf_manager.get_latest_members( + for p in identity.shelf_manager.get_latest_members( ShelfType.PROGRESS, ItemCategory.Book )[:10] ] @@ -297,22 +292,23 @@ def discover(request): tvshows_in_progress = Item.objects.filter( id__in=[ p.item_id - for p in user.shelf_manager.get_latest_members( + for p in identity.shelf_manager.get_latest_members( ShelfType.PROGRESS, ItemCategory.TV )[:10] ] ) else: + identity = None recent_podcast_episodes = [] books_in_progress = [] tvshows_in_progress = [] + layout = [] return render( request, "discover.html", { - "user": user, - "identity": user.identity, + "identity": identity, "gallery_list": gallery_list, "recent_podcast_episodes": recent_podcast_episodes, "books_in_progress": books_in_progress, diff --git a/doc/install-docker.md b/doc/install-docker.md new file mode 100644 index 00000000..d6465928 --- /dev/null +++ b/doc/install-docker.md @@ -0,0 +1,104 @@ +Run NeoDB in Docker +=================== + +## Overview +For small and medium NeoDB instances, it's recommended to deploy as a local container cluster with `docker-compose`. 
+ +```mermaid +flowchart TB + web[[Your reverse proxy server with SSL]] --- neodb-nginx[nginx listening on localhost:8000] + subgraph Containers managed by docker-compose + neodb-nginx --- neodb-web + neodb-nginx --- takahe-web + neodb-worker --- typesense[(typesense)] + neodb-worker --- neodb-db[(neodb-db)] + neodb-worker --- redis[(redis)] + neodb-web --- typesense + neodb-web --- neodb-db + neodb-web --- redis + neodb-web --- takahe-db[(takahe-db)] + migration([migration]) --- neodb-db + migration --- takahe-db + takahe-web --- takahe-db + takahe-web --- redis + takahe-stator --- takahe-db + takahe-stator --- redis + end +``` + +As shown in the diagram, a reverse proxy server (e.g. nginx, or Cloudflare tunnel) will be required, it should have SSL configured and pointing to `http://localhost:8000`; the rest is handled by `docker-compose` and containers. + +## Install Docker and add user to docker group +Create a user (e.g. `neouser`) to run neodb, execute these as *root* : +``` +# apt install docker.io docker-compose +# adduser --ingroup docker neouser +``` + +## Get configuration files + - create a folder for configuration, eg ~/neodb/config + - grab `docker-compose.yml` and `neodb.env.example` from source code + - rename `neodb.env.example` to `.env` + +## Set up .env file +Change essential options like `NEODB_SITE_DOMAIN` in `.env` before starting the cluster for the first time. Changing them later may have unintended consequences, please make sure they are correct before exposing the service externally. 
+ +- `NEODB_SITE_NAME` - name of your site +- `NEODB_SITE_DOMAIN` - domain name of your site +- `NEODB_SECRET_KEY` - encryption key of session data +- `NEODB_DATA` is the path to store db/media/cache, it's `../data` by default, but can be any path that's writable + +See `configuration.md` for more details + +## Start docker +in the folder with `docker-compose.yml` and `.env`, execute as the user you just created: +``` +$ docker-compose pull +$ docker-compose up -d +``` + +In a minute or so, the site should be up at 127.0.0.1:8000; you may check it with: +``` +$ curl http://localhost:8000/nodeinfo/2.0/ +``` + +JSON response will be returned if the server is up and running: +``` +{"version": "2.0", "software": {"name": "neodb", "version": "0.8-dev"}, "protocols": ["activitypub", "neodb"], "services": {"outbound": [], "inbound": []}, "usage": {"users": {"total": 1}, "localPosts": 0}, "openRegistrations": true, "metadata": {}} +``` + +## Make the site available publicly + +Next step is to expose `127.0.0.1:8000` to the external network as `https://yourdomain.tld` . There are many ways to do it, you may use nginx as a reverse proxy with an SSL cert, or configure a CDN provider to handle the SSL. There's no detailed instruction yet but contributions are welcome. + +NeoDB requires `https` by default. Although `http` may be technically possible, it's tedious to set up and not secure, hence not recommended. + +## Update NeoDB + +Check the release notes, update `docker-compose.yml` and `.env` as instructed.
Then pull the image: +``` +docker-compose pull +``` + +If there's no change in `docker-compose.yml`, restart only NeoDB services: +``` +$ docker-compose stop neodb-web neodb-worker neodb-worker-extra takahe-web takahe-stator nginx +$ docker-compose up -d +``` + +Otherwise restart the entire cluster: +``` +$ docker-compose down +$ docker-compose up -d +``` + +## Troubleshooting + + - `docker-compose ps` to see if any service is down (it's normal that `migration` is in `Exit 0` state) + - `docker-compose run shell` to open a shell inside the cluster; or `docker-compose run root` for a root shell, and `apt` is available if extra packages are needed + +## Scaling + +If you are running a high-traffic instance, increase `NEODB_WEB_WORKER_NUM`, `TAKAHE_WEB_WORKER_NUM`, `TAKAHE_STATOR_CONCURRENCY` and `TAKAHE_STATOR_CONCURRENCY_PER_MODEL` as long as your host server can handle them. + +Further scaling up with multiple nodes (e.g. via Kubernetes) is beyond the scope of this document, but consider running db/redis/typesense separately, and then duplicate web/worker/stator containers as long as connections and mounts are properly configured; `migration` only runs once at start-up or upgrade, and it should be kept that way.
diff --git a/doc/install.md b/doc/install.md index 336b6677..3bf3a789 100644 --- a/doc/install.md +++ b/doc/install.md @@ -20,11 +20,7 @@ This is a very basic guide with limited detail, contributions welcomed 0 Run in Docker --------------- -``` -cp neodb.env.dist neodb.env # update this configuration - -docker-compose up -``` +Recommended, see [Docker Installation](install-docker.md) 1 Manual Install ---------------- diff --git a/docker-compose.yml b/docker-compose.yml index 18caaa3c..d1d3ac98 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,49 +1,78 @@ -version: "3.4" +version: "3.8" # NEODB Docker Compose File # -# Note: configuration here may not be secure for production usage +# Note: may not be secure for production usage, use at your own risk +# +# The following env variable are expected from .env or command line +# - NEODB_SECRET_KEY +# - NEODB_SITE_DOMAIN +# - NEODB_SITE_NAME +# - NEODB_DATA x-shared: neodb-service: &neodb-service build: . - image: neodb:latest - env_file: - - neodb.env + image: nerodb/neodb:latest environment: - NEODB_DB_NAME=neodb - NEODB_DB_USER=neodb - NEODB_DB_PASSWORD=aubergine - NEODB_DB_HOST=neodb-db - NEODB_DB_PORT=5432 - - NEODB_REDIS_HOST=neodb-redis + - NEODB_REDIS_HOST=redis - NEODB_REDIS_PORT=6379 - NEODB_REDIS_DB=0 - NEODB_TYPESENSE_ENABLE=1 - - NEODB_TYPESENSE_HOST=neodb-search + - NEODB_TYPESENSE_HOST=typesense - NEODB_TYPESENSE_PORT=8108 - NEODB_TYPESENSE_KEY=eggplant - - NEODB_STATIC_ROOT=/www/static/ - - NEODB_MEDIA_ROOT=/www/media/ + - NEODB_FROM_EMAIL=no-reply@${NEODB_SITE_DOMAIN} + - NEODB_MEDIA_ROOT=/www/m/ + - TAKAHE_DB_NAME=takahe + - TAKAHE_DB_USER=takahe + - TAKAHE_DB_PASSWORD=aubergine + - TAKAHE_DB_HOST=takahe-db + - TAKAHE_DB_PORT=5432 + - TAKAHE_SECRET_KEY=${NEODB_SECRET_KEY} + - TAKAHE_MAIN_DOMAIN=${NEODB_SITE_DOMAIN} + - TAKAHE_MEDIA_URL=https://${NEODB_SITE_DOMAIN}/media/ + - TAKAHE_EMAIL_FROM=no-reply@${NEODB_SITE_DOMAIN} + - 
TAKAHE_DATABASE_SERVER=postgres://takahe:aubergine@takahe-db/takahe + - TAKAHE_CACHES_DEFAULT=redis://redis:6379/0 + - TAKAHE_MEDIA_BACKEND=local://www/media/ + - TAKAHE_MEDIA_ROOT=/www/media/ + - TAKAHE_USE_PROXY_HEADERS=true + - TAKAHE_STATOR_CONCURRENCY=4 + - TAKAHE_STATOR_CONCURRENCY_PER_MODEL=2 + - TAKAHE_DEBUG=${NEODB_DEBUG:-False} restart: "on-failure" volumes: - - ${NEODB_DATA:-../data}/neodb-media:/www/media + - ${NEODB_DATA:-../data}/neodb-media:/www/m + - ${NEODB_DATA:-../data}/takahe-media:/www/media + - ${NEODB_DATA:-../data}/takahe-cache:/www/cache + - ${NEODB_DATA:-../data}/www-root:/www/root + # - ${NEODB_DATA:-../data}/log:/var/log/nginx depends_on: - - neodb-redis + - redis - neodb-db - - neodb-search + - typesense + - takahe-db services: - neodb-redis: + redis: image: redis:alpine - ports: - - "16379:6379" + # ports: + # - "16379:6379" + command: redis-server --save 60 1 --loglevel warning + volumes: + - ${NEODB_DATA:-../data}/redis:/data - neodb-search: + typesense: image: typesense/typesense:0.25.0 restart: "on-failure" - healthcheck: - test: ['CMD', 'curl', '-vf', 'http://127.0.0.1:8108/health'] + # healthcheck: + # test: ['CMD', 'curl', '-vf', 'http://127.0.0.1:8108/health'] # ports: # - "18108:8108" environment: @@ -57,7 +86,7 @@ services: healthcheck: test: ['CMD', 'pg_isready', '-U', 'neodb'] volumes: - - ${NEODB_DATA:-../data}/neodb-data:/var/lib/postgresql/data + - ${NEODB_DATA:-../data}/neodb-db:/var/lib/postgresql/data # ports: # - "15432:5432" environment: @@ -65,23 +94,40 @@ services: - POSTGRES_USER=neodb - POSTGRES_PASSWORD=aubergine + takahe-db: + image: postgres:14-alpine + healthcheck: + test: ['CMD', 'pg_isready', '-U', 'takahe'] + volumes: + - ${NEODB_DATA:-../data}/takahe-db:/var/lib/postgresql/data + # ports: + # - "16432:5432" + environment: + - POSTGRES_DB=takahe + - POSTGRES_USER=takahe + - POSTGRES_PASSWORD=aubergine + migration: <<: *neodb-service restart: "no" - command: python /neodb/manage.py migrate + command: "sh 
-c 'python /takahe/manage.py migrate && python /neodb/manage.py migrate'" depends_on: neodb-db: condition: service_healthy - neodb-search: + typesense: condition: service_started - neodb-redis: + redis: condition: service_started + takahe-db: + condition: service_healthy neodb-web: <<: *neodb-service # ports: # - "18000:8000" - command: gunicorn boofilsic.wsgi -w 8 --preload -b 0.0.0.0:8000 + command: gunicorn boofilsic.wsgi -w ${NEODB_WEB_WORKER_NUM:-8} --preload -b 0.0.0.0:8000 + healthcheck: + test: ['CMD', 'wget', '-qO/tmp/test', 'http://127.0.0.1:8000/discover/'] depends_on: migration: condition: service_completed_successfully @@ -93,18 +139,50 @@ services: migration: condition: service_completed_successfully - neodb-worker-secondary: + neodb-worker-extra: <<: *neodb-service command: python /neodb/manage.py rqworker --with-scheduler fetch crawl depends_on: migration: condition: service_completed_successfully - neodb-nginx: + takahe-web: <<: *neodb-service - command: nginx + # ports: + # - "19000:8000" + command: gunicorn --chdir /takahe takahe.wsgi -w ${TAKAHE_WEB_WORKER_NUM:-8} --preload -b 0.0.0.0:8000 + healthcheck: + test: ['CMD', 'wget', '-qO/tmp/test', 'http://127.0.0.1:8000/nodeinfo/2.0/'] depends_on: + migration: + condition: service_completed_successfully + + takahe-stator: + <<: *neodb-service + command: python /takahe/manage.py runstator + depends_on: + migration: + condition: service_completed_successfully + + nginx: + <<: *neodb-service + user: "root:root" + command: nginx -g 'daemon off;' + depends_on: + takahe-web: + condition: service_started neodb-web: condition: service_started ports: - "${NEODB_PORT:-8000}:8000" + + shell: + <<: *neodb-service + command: bash + profiles: ["tools"] + + root: + <<: *neodb-service + command: bash + profiles: ["tools"] + user: "root:root" diff --git a/misc/neodb-manage b/misc/neodb-manage new file mode 100755 index 00000000..1f1558e1 --- /dev/null +++ b/misc/neodb-manage @@ -0,0 +1,2 @@ +#!/bin/sh +python 
/neodb/manage.py $@ diff --git a/misc/nginx.conf.d/neodb.conf b/misc/nginx.conf.d/neodb.conf index ebb18efa..23703612 100644 --- a/misc/nginx.conf.d/neodb.conf +++ b/misc/nginx.conf.d/neodb.conf @@ -1,22 +1,107 @@ +proxy_cache_path /www/cache levels=1:2 keys_zone=takahe:20m inactive=14d max_size=1g; + +upstream neodb { + server neodb-web:8000; +} + +upstream takahe { + server takahe-web:8000; +} + server { - server_name neodb.social; listen 8000; - location = /favicon.ico { - root /www; - access_log off; log_not_found off; + + charset utf-8; + ignore_invalid_headers on; + client_max_body_size 100M; + client_body_buffer_size 128k; + proxy_connect_timeout 900; + proxy_set_header Host $http_host; + proxy_set_header X-Forwarded-Proto https; + proxy_http_version 1.1; + proxy_hide_header X-Takahe-User; + proxy_hide_header X-Takahe-Identity; + + # allow admin to serv their own robots.txt/favicon.ico/... + location ~ ^/\w+\.\w+$ { + root /www/root; + access_log off; + log_not_found off; } - location / { - client_max_body_size 100M; - proxy_set_header Host $http_host; - proxy_set_header X-Forwarded-Proto https; - proxy_pass http://neodb-web:8000; + location /static/ { + alias /takahe/static-collected/; + add_header Cache-Control "public, max-age=604800, immutable"; } - location /s/ { - root /www; + alias /neodb/static/; + add_header Cache-Control "public, max-age=604800, immutable"; } - location /m/ { - root /www; + alias /www/m/; + add_header Cache-Control "public, max-age=604800, immutable"; + } + # Proxies media and remote media with caching + location ~* ^/(media|proxy) { + # Cache media and proxied resources + proxy_cache takahe; + proxy_cache_key $host$uri; + proxy_cache_valid 200 304 4h; + proxy_cache_valid 301 307 4h; + proxy_cache_valid 500 502 503 504 0s; + proxy_cache_valid any 1h; + add_header X-Cache $upstream_cache_status; + + # Signal to Takahē that we support full URI accel proxying + proxy_set_header X-Takahe-Accel true; + proxy_pass http://takahe; + } + # 
Internal target for X-Accel redirects that stashes the URI in a var + location /__takahe_accel__/ { + internal; + set $takahe_realuri $upstream_http_x_takahe_realuri; + rewrite ^/(.+) /__takahe_accel__/real/; + } + # Real internal-only target for X-Accel redirects + location /__takahe_accel__/real/ { + # Only allow internal redirects + internal; + + # # Reconstruct the remote URL + resolver 9.9.9.9 8.8.8.8 valid=300s; + + # Unset Authorization and Cookie for security reasons. + proxy_set_header Authorization ''; + proxy_set_header Cookie ''; + proxy_set_header User-Agent 'takahe/nginx'; + proxy_set_header Host $proxy_host; + proxy_set_header X-Forwarded-For ''; + proxy_set_header X-Forwarded-Host ''; + proxy_set_header X-Forwarded-Server ''; + proxy_set_header X-Real-Ip ''; + + # Stops the local disk from being written to (just forwards data through) + proxy_max_temp_file_size 0; + + # Proxy the remote file through to the client + proxy_pass $takahe_realuri; + proxy_ssl_server_name on; + add_header X-Takahe-Accel "HIT"; + + # Cache these responses too + proxy_cache takahe; + # Cache after a single request + proxy_cache_min_uses 1; + proxy_cache_key $takahe_realuri; + proxy_cache_valid 200 304 720h; + proxy_cache_valid 301 307 12h; + proxy_cache_valid 500 502 503 504 0s; + proxy_cache_valid any 72h; + add_header X-Cache $upstream_cache_status; + } + location ~* ^/(@|\.well-known|actor|inbox|nodeinfo|api/v1|api/v2|auth|oauth|tags|settings|media|proxy|admin|djadmin) { + proxy_pass http://takahe; + } + location / { + proxy_pass http://neodb; } } diff --git a/misc/takahe-manage b/misc/takahe-manage new file mode 100755 index 00000000..1aee42c7 --- /dev/null +++ b/misc/takahe-manage @@ -0,0 +1,2 @@ +#!/bin/sh +python /takahe/manage.py $@ diff --git a/neodb-takahe b/neodb-takahe new file mode 160000 index 00000000..4bf7dd6b --- /dev/null +++ b/neodb-takahe @@ -0,0 +1 @@ +Subproject commit 4bf7dd6b6e6594fdfe2df4e9b3b5383d5aea7063 diff --git a/neodb.env.dist 
b/neodb.env.dist deleted file mode 100644 index ecb44ed3..00000000 --- a/neodb.env.dist +++ /dev/null @@ -1,6 +0,0 @@ -NEODB_SECRET_KEY=change_me -NEODB_SITE_NAME=Example Site -NEODB_SITE_DOMAIN=example.site -#NEODB_PORT=8000 -#NEODB_SSL=1 -#NEODB_DATA=/var/lib/neodb diff --git a/neodb.env.example b/neodb.env.example new file mode 100644 index 00000000..ad1f5f6a --- /dev/null +++ b/neodb.env.example @@ -0,0 +1,23 @@ +# NEODB Configuration + +# copy along with docker-compose.yml, rename it to .env + +# Change these before start the instance for the first time +NEODB_SECRET_KEY=change_me +NEODB_SITE_NAME=Example Site +NEODB_SITE_DOMAIN=example.site + +# HTTP port your reverse proxy should set request to +# NEODB_PORT=8000 + +# Path to store db/media/cache/etc, must be writable +# NEODB_DATA=/var/lib/neodb + +# Scaling parameters +# NEODB_WEB_WORKER_NUM=32 +# TAKAHE_WEB_WORKER_NUM=32 +# TAKAHE_STATOR_CONCURRENCY=10 +# TAKAHE_STATOR_CONCURRENCY_PER_MODEL=10 + +# Turn on DEBUG mode, either set this to True or don't set it at all +# NEODB_DEBUG=True diff --git a/pyproject.toml b/pyproject.toml index e48493aa..cd6bdd9a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [tool.pyright] -exclude = [ "media", ".venv", ".git", "playground", "catalog/*/tests.py", "neodb", "**/migrations", "**/sites/douban_*" ] +exclude = [ "media", ".venv", ".git", "playground", "catalog/*/tests.py", "neodb", "**/migrations", "**/sites/douban_*", "neodb-takahe" ] [tool.djlint] ignore="T002,T003,H006,H019,H020,H021,H023,H030,H031" diff --git a/requirements.txt b/requirements.txt index 87127e33..9ae79ac6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -23,7 +23,6 @@ django-user-messages dnspython easy-thumbnails filetype -fontawesomefree gunicorn httpx igdb-api-v4 diff --git a/takahe/migrations/0001_initial.py b/takahe/migrations/0001_initial.py index 2b66c095..7b51373e 100644 --- a/takahe/migrations/0001_initial.py +++ b/takahe/migrations/0001_initial.py @@ -506,4 +506,55 
@@ class Migration(migrations.Migration): "db_table": "users_inboxmessage", }, ), + migrations.CreateModel( + name="Config", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("key", models.CharField(max_length=500)), + ("json", models.JSONField(blank=True, null=True)), + ("image", models.ImageField(blank=True, null=True, upload_to="")), + ( + "domain", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="configs", + to="takahe.domain", + ), + ), + ( + "identity", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="configs", + to="takahe.identity", + ), + ), + ( + "user", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="configs", + to="takahe.user", + ), + ), + ], + options={ + "db_table": "core_config", + "unique_together": {("key", "user", "identity", "domain")}, + }, + ), ] diff --git a/takahe/utils.py b/takahe/utils.py index 82e128f7..0f25df2d 100644 --- a/takahe/utils.py +++ b/takahe/utils.py @@ -342,9 +342,9 @@ def post( content: str, visibility: Visibilities, data: dict | None = None, - reply_to_pk: int | None = None, post_pk: int | None = None, post_time: datetime.datetime | None = None, + reply_to_pk: int | None = None, ) -> int | None: identity = Identity.objects.get(pk=author_pk) post = ( @@ -403,7 +403,7 @@ def post_mark(mark, share_as_new_post: bool): if user.preference.mastodon_append_tag else "" ) - stars = _rating_to_emoji(mark.rating_grade, 0) + stars = _rating_to_emoji(mark.rating_grade, 1) item_link = f"{settings.SITE_INFO['site_url']}/~neodb~{mark.item.url}" pre_conetent = ( @@ -518,7 +518,9 @@ def get_post_stats(post_pk: int) -> dict: return post.stats or {} @staticmethod - def get_post_replies(post_pk: int, identity_pk: int | None): + def get_post_replies(post_pk: int | None, 
identity_pk: int | None): + if not post_pk: + return Post.objects.none() node = Post.objects.filter(pk=post_pk).first() if not node: return Post.objects.none() diff --git a/users/migrations/0001_initial.py b/users/migrations/0001_initial.py index 9e000a06..6f4dd85b 100644 --- a/users/migrations/0001_initial.py +++ b/users/migrations/0001_initial.py @@ -137,9 +137,6 @@ class Migration(migrations.Migration): ), ), ], - managers=[ - ("objects", django.contrib.auth.models.UserManager()), - ], ), migrations.CreateModel( name="Preference", diff --git a/users/migrations/0013_init_identity.py b/users/migrations/0013_init_identity.py index ba88a9fc..41fd8c43 100644 --- a/users/migrations/0013_init_identity.py +++ b/users/migrations/0013_init_identity.py @@ -47,7 +47,7 @@ def init_domain(apps, schema_editor): key="site_name", user=None, identity=None, - domain=domain, + domain_id=domain, defaults={"json": name}, ) diff --git a/users/models/apidentity.py b/users/models/apidentity.py index c82d14cc..a7c7a075 100644 --- a/users/models/apidentity.py +++ b/users/models/apidentity.py @@ -76,8 +76,10 @@ def summary(self): @property def avatar(self): - # return self.takahe_identity.icon_uri or static("img/avatar.svg") # fixme - return f"/proxy/identity_icon/{self.pk}/" + if self.local: + return self.takahe_identity.icon_uri or static("img/avatar.svg") + else: + return f"/proxy/identity_icon/{self.pk}/" @property def url(self): From 6de533552824cd6a1cca195bcfad178d4c847fe2 Mon Sep 17 00:00:00 2001 From: Your Name Date: Sat, 19 Aug 2023 21:59:43 +0000 Subject: [PATCH 11/43] multi-staged docker build --- .dockerignore | 2 + .github/workflows/docker-dev.yml | 42 ++++++++++++++++++++ Dockerfile | 66 +++++++++++++++++++------------- docker-compose.yml | 14 +++---- misc/bin/neodb-manage | 2 + misc/bin/takahe-manage | 2 + misc/neodb-manage | 2 - misc/takahe-manage | 2 - 8 files changed, 95 insertions(+), 37 deletions(-) create mode 100644 .github/workflows/docker-dev.yml create mode 100755 
misc/bin/neodb-manage create mode 100755 misc/bin/takahe-manage delete mode 100755 misc/neodb-manage delete mode 100755 misc/takahe-manage diff --git a/.dockerignore b/.dockerignore index f76a0e48..9ddf87ec 100644 --- a/.dockerignore +++ b/.dockerignore @@ -8,3 +8,5 @@ __pycache__ /doc /media /static +/docker-compose.yml +/Dockerfile diff --git a/.github/workflows/docker-dev.yml b/.github/workflows/docker-dev.yml new file mode 100644 index 00000000..66346290 --- /dev/null +++ b/.github/workflows/docker-dev.yml @@ -0,0 +1,42 @@ +name: publish + +on: [push, pull_request] + +jobs: + push_to_docker_hub: + name: Push image to Docker Hub + if: github.repository_owner == 'alphatownsman' + runs-on: ubuntu-latest + steps: + - name: Check out the repo + uses: actions/checkout@v3 + with: + submodules: 'true' + + - name: Set up QEMU + uses: docker/setup-qemu-action@v2 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + - name: Docker meta + id: meta + uses: docker/metadata-action@v4 + with: + images: neodb/neodb + + - name: Login to DockerHub + if: github.event_name != 'pull_request' + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Build and push + uses: docker/build-push-action@v4 + with: + context: . + platforms: "linux/amd64,linux/arm64" + push: ${{ github.event_name != 'pull_request' }} + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} diff --git a/Dockerfile b/Dockerfile index 25decdc1..fc180bcf 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,40 +1,54 @@ # syntax=docker/dockerfile:1 -FROM python:3.11-slim +FROM python:3.11-slim as build ENV PYTHONDONTWRITEBYTECODE=1 ENV PYTHONUNBUFFERED=1 -RUN useradd -U app -COPY . 
/neodb -RUN mkdir -p /www + +RUN --mount=type=cache,sharing=locked,target=/var/cache/apt apt-get update \ + && apt-get install -y --no-install-recommends build-essential libpq-dev python3-venv opencc git + +COPY requirements.txt /neodb/ WORKDIR /neodb -RUN mv neodb-takahe /takahe -RUN cp misc/neodb-manage misc/takahe-manage /bin -RUN --mount=type=cache,target=/var/cache/apt apt-get update \ - && apt-get install -y --no-install-recommends \ - build-essential \ - libpq-dev \ +RUN python -m venv .venv +RUN --mount=type=cache,sharing=locked,target=/root/.cache .venv/bin/python3 -m pip install --upgrade -r requirements.txt + +COPY neodb-takahe/requirements.txt /takahe/ +WORKDIR /takahe +RUN python -m venv /takahe/.venv +RUN --mount=type=cache,sharing=locked,target=/root/.cache .venv/bin/python3 -m pip install --upgrade -r requirements.txt + +RUN apt-get purge -y --auto-remove build-essential && rm -rf /var/lib/apt/lists/* + +# runtime stage +FROM python:3.11-slim as runtime +ENV PYTHONDONTWRITEBYTECODE=1 +ENV PYTHONUNBUFFERED=1 + +RUN --mount=type=cache,sharing=locked,target=/var/cache/apt-run apt-get update \ + && apt-get install -y --no-install-recommends libpq-dev \ busybox \ - postgresql-client \ nginx \ - opencc \ - git + opencc RUN busybox --install -COPY misc/nginx.conf.d/* /etc/nginx/conf.d/ - -RUN --mount=type=cache,target=/root/.cache python3 -m pip install --upgrade -r requirements.txt -RUN --mount=type=cache,target=/root/.cache cd /takahe && python3 -m pip install --upgrade -r requirements.txt - -RUN apt-get purge -y --auto-remove \ - build-essential \ - libpq-dev \ - && rm -rf /var/lib/apt/lists/* +COPY . 
/neodb +WORKDIR /neodb +COPY --from=build /neodb/.venv .venv +RUN .venv/bin/python3 manage.py compilescss +RUN .venv/bin/python3 manage.py collectstatic --noinput -RUN python3 manage.py compilescss \ - && python3 manage.py collectstatic --noinput +RUN mv /neodb/neodb-takahe /takahe +WORKDIR /takahe +COPY --from=build /takahe/.venv .venv +RUN pwd && ls +RUN TAKAHE_DATABASE_SERVER="postgres://x@y/z" TAKAHE_SECRET_KEY="t" TAKAHE_MAIN_DOMAIN="x.y" .venv/bin/python3 manage.py collectstatic --noinput -RUN cd /takahe && TAKAHE_DATABASE_SERVER="postgres://x@y/z" TAKAHE_SECRET_KEY="t" TAKAHE_MAIN_DOMAIN="x.y" python3 manage.py collectstatic --noinput +COPY misc/nginx.conf.d/* /etc/nginx/conf.d/ +COPY misc/bin/* /bin/ +RUN mkdir -p /www +RUN useradd -U app +WORKDIR /neodb USER app:app # invoke check by default -CMD [ "sh", "-c", 'python3 /neodb/manage.py check && TAKAHE_DATABASE_SERVER="postgres://x@y/z" TAKAHE_SECRET_KEY="t" TAKAHE_MAIN_DOMAIN="x.y" python3 manage.py collectstatic --noinput python3 /takahe/manage.py check' ] +CMD [ "sh", "-c", 'neodb-manage check && TAKAHE_DATABASE_SERVER="postgres://x@y/z" TAKAHE_SECRET_KEY="t" TAKAHE_MAIN_DOMAIN="x.y" takahe-manage check' ] diff --git a/docker-compose.yml b/docker-compose.yml index d1d3ac98..1e063418 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -13,7 +13,7 @@ version: "3.8" x-shared: neodb-service: &neodb-service build: . 
- image: nerodb/neodb:latest + image: neodb/neodb:${TAG:-latest} environment: - NEODB_DB_NAME=neodb - NEODB_DB_USER=neodb @@ -110,7 +110,7 @@ services: migration: <<: *neodb-service restart: "no" - command: "sh -c 'python /takahe/manage.py migrate && python /neodb/manage.py migrate'" + command: "sh -c '/takahe/.venv/bin/python /takahe/manage.py migrate && /neodb/.venv/bin/python /neodb/manage.py migrate'" depends_on: neodb-db: condition: service_healthy @@ -125,7 +125,7 @@ services: <<: *neodb-service # ports: # - "18000:8000" - command: gunicorn boofilsic.wsgi -w ${NEODB_WEB_WORKER_NUM:-8} --preload -b 0.0.0.0:8000 + command: /neodb/.venv/bin/gunicorn boofilsic.wsgi -w ${NEODB_WEB_WORKER_NUM:-8} --preload -b 0.0.0.0:8000 healthcheck: test: ['CMD', 'wget', '-qO/tmp/test', 'http://127.0.0.1:8000/discover/'] depends_on: @@ -134,14 +134,14 @@ services: neodb-worker: <<: *neodb-service - command: python /neodb/manage.py rqworker --with-scheduler import export mastodon fetch crawl + command: /neodb/.venv/bin/python /neodb/manage.py rqworker --with-scheduler import export mastodon fetch crawl depends_on: migration: condition: service_completed_successfully neodb-worker-extra: <<: *neodb-service - command: python /neodb/manage.py rqworker --with-scheduler fetch crawl + command: /neodb/.venv/bin/python /neodb/manage.py rqworker --with-scheduler fetch crawl depends_on: migration: condition: service_completed_successfully @@ -150,7 +150,7 @@ services: <<: *neodb-service # ports: # - "19000:8000" - command: gunicorn --chdir /takahe takahe.wsgi -w ${TAKAHE_WEB_WORKER_NUM:-8} --preload -b 0.0.0.0:8000 + command: /takahe/.venv/bin/gunicorn --chdir /takahe takahe.wsgi -w ${TAKAHE_WEB_WORKER_NUM:-8} --preload -b 0.0.0.0:8000 healthcheck: test: ['CMD', 'wget', '-qO/tmp/test', 'http://127.0.0.1:8000/nodeinfo/2.0/'] depends_on: @@ -159,7 +159,7 @@ services: takahe-stator: <<: *neodb-service - command: python /takahe/manage.py runstator + command: /takahe/.venv/bin/python 
/takahe/manage.py runstator depends_on: migration: condition: service_completed_successfully diff --git a/misc/bin/neodb-manage b/misc/bin/neodb-manage new file mode 100755 index 00000000..be7302c2 --- /dev/null +++ b/misc/bin/neodb-manage @@ -0,0 +1,2 @@ +#!/bin/sh +/neodb/.venv/bin/python /neodb/manage.py $@ diff --git a/misc/bin/takahe-manage b/misc/bin/takahe-manage new file mode 100755 index 00000000..986bb3c1 --- /dev/null +++ b/misc/bin/takahe-manage @@ -0,0 +1,2 @@ +#!/bin/sh +/takahe/.venv/bin/python /takahe/manage.py $@ diff --git a/misc/neodb-manage b/misc/neodb-manage deleted file mode 100755 index 1f1558e1..00000000 --- a/misc/neodb-manage +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -python /neodb/manage.py $@ diff --git a/misc/takahe-manage b/misc/takahe-manage deleted file mode 100755 index 1aee42c7..00000000 --- a/misc/takahe-manage +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -python /takahe/manage.py $@ From 4702b4feb3086b2f5ec62ae10e2b294d88d1ab91 Mon Sep 17 00:00:00 2001 From: Your Name Date: Sun, 20 Aug 2023 04:27:23 +0000 Subject: [PATCH 12/43] add init command to ensure settings post migration --- .dockerignore | 12 +++++++-- Dockerfile | 5 ++-- boofilsic/settings.py | 39 +++++----------------------- catalog/search/typesense.py | 35 +++++++++++++++++++------ common/management/commands/setup.py | 19 ++++++++++++++ common/urls.py | 1 + common/views.py | 40 +++++++++++++++++++++++++++++ docker-compose.yml | 16 ++++++++---- journal/views/post.py | 1 - misc/bin/neodb-init | 10 ++++++++ misc/bin/nginx-start | 3 +++ misc/nginx.conf.d/neodb.conf | 6 ++--- neodb-takahe | 2 +- neodb.env.example | 3 +++ 14 files changed, 138 insertions(+), 54 deletions(-) create mode 100644 common/management/commands/setup.py create mode 100755 misc/bin/neodb-init create mode 100755 misc/bin/nginx-start diff --git a/.dockerignore b/.dockerignore index 9ddf87ec..22ed546c 100644 --- a/.dockerignore +++ b/.dockerignore @@ -4,9 +4,17 @@ .vscode .github .git +.gitignore 
+.pre-commit-config.yaml __pycache__ +/Dockerfile /doc +/docker-compose.yml /media /static -/docker-compose.yml -/Dockerfile +/test_data +/neodb +/neodb-takahe/doc +/neodb-takahe/docker +/neodb-takahe/static-collected +/neodb-takahe/takahe/local_settings.py diff --git a/Dockerfile b/Dockerfile index fc180bcf..6a6e8d39 100644 --- a/Dockerfile +++ b/Dockerfile @@ -27,6 +27,7 @@ RUN --mount=type=cache,sharing=locked,target=/var/cache/apt-run apt-get update \ && apt-get install -y --no-install-recommends libpq-dev \ busybox \ nginx \ + gettext-base \ opencc RUN busybox --install @@ -42,12 +43,12 @@ COPY --from=build /takahe/.venv .venv RUN pwd && ls RUN TAKAHE_DATABASE_SERVER="postgres://x@y/z" TAKAHE_SECRET_KEY="t" TAKAHE_MAIN_DOMAIN="x.y" .venv/bin/python3 manage.py collectstatic --noinput -COPY misc/nginx.conf.d/* /etc/nginx/conf.d/ +WORKDIR /neodb COPY misc/bin/* /bin/ RUN mkdir -p /www RUN useradd -U app +RUN rm -rf /var/lib/apt/lists/* -WORKDIR /neodb USER app:app # invoke check by default diff --git a/boofilsic/settings.py b/boofilsic/settings.py index f5424cbd..d03d11a8 100644 --- a/boofilsic/settings.py +++ b/boofilsic/settings.py @@ -201,6 +201,10 @@ STATIC_URL = "/s/" STATIC_ROOT = os.environ.get("NEODB_STATIC_ROOT", os.path.join(BASE_DIR, "static/")) +if DEBUG: + # django-sass-processor will generate neodb.css on-the-fly when DEBUG + # NEODB_STATIC_ROOT is readonly in docker mode, so we give it a writable place + SASS_PROCESSOR_ROOT = "/tmp" STATICFILES_STORAGE = "django.contrib.staticfiles.storage.ManifestStaticFilesStorage" STATICFILES_FINDERS = [ @@ -338,42 +342,13 @@ REDIS_DB = int(os.environ.get("NEODB_REDIS_DB", 0)) RQ_QUEUES = { - "mastodon": { - "HOST": REDIS_HOST, - "PORT": REDIS_PORT, - "DB": REDIS_DB, - "DEFAULT_TIMEOUT": -1, - }, - "export": { - "HOST": REDIS_HOST, - "PORT": REDIS_PORT, - "DB": REDIS_DB, - "DEFAULT_TIMEOUT": -1, - }, - "import": { - "HOST": REDIS_HOST, - "PORT": REDIS_PORT, - "DB": REDIS_DB, - "DEFAULT_TIMEOUT": -1, - }, - 
"fetch": { - "HOST": REDIS_HOST, - "PORT": REDIS_PORT, - "DB": REDIS_DB, - "DEFAULT_TIMEOUT": -1, - }, - "crawl": { + q: { "HOST": REDIS_HOST, "PORT": REDIS_PORT, "DB": REDIS_DB, "DEFAULT_TIMEOUT": -1, - }, - "doufen": { - "HOST": REDIS_HOST, - "PORT": REDIS_PORT, - "DB": REDIS_DB, - "DEFAULT_TIMEOUT": -1, - }, + } + for q in ["mastodon", "export", "import", "fetch", "crawl", "ap"] } RQ_SHOW_ADMIN_LINK = True diff --git a/catalog/search/typesense.py b/catalog/search/typesense.py index 57816a12..eb2d83f8 100644 --- a/catalog/search/typesense.py +++ b/catalog/search/typesense.py @@ -2,12 +2,14 @@ import types from datetime import timedelta from pprint import pprint +from time import sleep import django_rq import typesense from django.conf import settings from django.db.models.signals import post_delete, post_save from django_redis import get_redis_connection +from loguru import logger from rq.job import Job from typesense.collection import Collection from typesense.exceptions import ObjectNotFound @@ -51,9 +53,6 @@ SEARCH_PAGE_SIZE = 20 -logger = logging.getLogger(__name__) - - _PENDING_INDEX_KEY = "pending_index_ids" _PENDING_INDEX_QUEUE = "import" _PENDING_INDEX_JOB_ID = "pending_index_flush" @@ -184,10 +183,30 @@ def config(cls): @classmethod def init(cls): - idx = typesense.Client(settings.TYPESENSE_CONNECTION).collections - if idx: - # idx.delete() - idx.create(cls.config()) + try: + client = typesense.Client(settings.TYPESENSE_CONNECTION) + wait = 5 + while not client.operations.is_healthy() and wait: + logger.warning("Typesense: server not healthy") + sleep(1) + wait -= 1 + idx = client.collections[settings.TYPESENSE_INDEX_NAME] + if idx: + try: + i = idx.retrieve() + logger.debug( + f"Typesense: index {settings.TYPESENSE_INDEX_NAME} has {i['num_documents']} documents" + ) + return + except: + client.collections.create(cls.config()) + logger.info( + f"Typesense: index {settings.TYPESENSE_INDEX_NAME} created" + ) + return + logger.error("Typesense: server 
unknown error") + except Exception as e: + logger.error(f"Typesense: server error {e}") @classmethod def delete_index(cls): @@ -309,7 +328,7 @@ def delete_item(cls, obj): try: cls.instance().documents[pk].delete() except Exception as e: - logger.warn(f"delete item error: \n{e}") + logger.warning(f"delete item error: \n{e}") @classmethod def search(cls, q, page=1, categories=None, tag=None, sort=None): diff --git a/common/management/commands/setup.py b/common/management/commands/setup.py new file mode 100644 index 00000000..d25b1ecf --- /dev/null +++ b/common/management/commands/setup.py @@ -0,0 +1,19 @@ +from django.core.management.base import BaseCommand + +from catalog.search.typesense import Indexer + + +class Command(BaseCommand): + help = "Post-Migration Setup" + + def handle(self, *args, **options): + # Update site name if changed + + # Create/update admin user if configured in env + + # Create basic emoji if not exists + + # Create search index if not exists + Indexer.init() + + # Register cron jobs if not yet diff --git a/common/urls.py b/common/urls.py index e8bc0a15..9686ac1a 100644 --- a/common/urls.py +++ b/common/urls.py @@ -7,5 +7,6 @@ path("", home), path("home/", home, name="home"), path("me/", me, name="me"), + path("nodeinfo/2.0/", nodeinfo2), re_path("^~neodb~(?P.+)", ap_redirect), ] diff --git a/common/views.py b/common/views.py index ce10d644..cbfbb3ac 100644 --- a/common/views.py +++ b/common/views.py @@ -1,8 +1,13 @@ +from django.conf import settings from django.contrib import messages from django.contrib.auth.decorators import login_required +from django.db import connection +from django.http import JsonResponse from django.shortcuts import redirect, render from django.urls import reverse +from users.models import User + @login_required def me(request): @@ -26,6 +31,41 @@ def ap_redirect(request, uri): return redirect(uri) +def nodeinfo2(request): + usage = {"users": {"total": User.objects.count()}} + # return estimated number of marks as 
posts, since count the whole table is slow + # TODO filter local with SQL function in https://wiki.postgresql.org/wiki/Count_estimate + with connection.cursor() as cursor: + cursor.execute( + "SELECT n_live_tup FROM pg_stat_all_tables WHERE relname = 'journal_shelfmember';" + ) + row = cursor.fetchone() + if row: + usage["localPosts"] = row[0] + with connection.cursor() as cursor: + cursor.execute( + "SELECT n_live_tup FROM pg_stat_all_tables WHERE relname = 'journal_comment';" + ) + row = cursor.fetchone() + if row: + usage["localComments"] = row[0] + return JsonResponse( + { + "version": "2.1", + "software": { + "name": "neodb", + "version": settings.NEODB_VERSION, + "repository": "https://github.com/neodb-social/neodb", + "homepage": "https://neodb.net/", + }, + "protocols": ["activitypub", "neodb"], + "services": {"outbound": [], "inbound": []}, + "usage": usage, + "metadata": {"nodeName": settings.SITE_INFO["site_name"]}, + } + ) + + def error_400(request, exception=None): return render( request, diff --git a/docker-compose.yml b/docker-compose.yml index 1e063418..674dc2bf 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -15,6 +15,10 @@ x-shared: build: . 
image: neodb/neodb:${TAG:-latest} environment: + - NEODB_SITE_NAME + - NEODB_SITE_DOMAIN + - NEODB_DEBUG + - NEODB_SECRET_KEY - NEODB_DB_NAME=neodb - NEODB_DB_USER=neodb - NEODB_DB_PASSWORD=aubergine @@ -29,6 +33,7 @@ x-shared: - NEODB_TYPESENSE_KEY=eggplant - NEODB_FROM_EMAIL=no-reply@${NEODB_SITE_DOMAIN} - NEODB_MEDIA_ROOT=/www/m/ + - NEODB_WEB_SERVER=neodb-web:8000 - TAKAHE_DB_NAME=takahe - TAKAHE_DB_USER=takahe - TAKAHE_DB_PASSWORD=aubergine @@ -46,6 +51,7 @@ x-shared: - TAKAHE_STATOR_CONCURRENCY=4 - TAKAHE_STATOR_CONCURRENCY_PER_MODEL=2 - TAKAHE_DEBUG=${NEODB_DEBUG:-False} + - TAKAHE_WEB_SERVER=takahe-web:8000 restart: "on-failure" volumes: - ${NEODB_DATA:-../data}/neodb-media:/www/m @@ -110,7 +116,7 @@ services: migration: <<: *neodb-service restart: "no" - command: "sh -c '/takahe/.venv/bin/python /takahe/manage.py migrate && /neodb/.venv/bin/python /neodb/manage.py migrate'" + command: /bin/neodb-init depends_on: neodb-db: condition: service_healthy @@ -127,21 +133,21 @@ services: # - "18000:8000" command: /neodb/.venv/bin/gunicorn boofilsic.wsgi -w ${NEODB_WEB_WORKER_NUM:-8} --preload -b 0.0.0.0:8000 healthcheck: - test: ['CMD', 'wget', '-qO/tmp/test', 'http://127.0.0.1:8000/discover/'] + test: ['CMD', 'wget', '-qO/tmp/test', 'http://127.0.0.1:8000/nodeinfo/2.0/'] depends_on: migration: condition: service_completed_successfully neodb-worker: <<: *neodb-service - command: /neodb/.venv/bin/python /neodb/manage.py rqworker --with-scheduler import export mastodon fetch crawl + command: /neodb/.venv/bin/python /neodb/manage.py rqworker --with-scheduler import export mastodon fetch crawl ap depends_on: migration: condition: service_completed_successfully neodb-worker-extra: <<: *neodb-service - command: /neodb/.venv/bin/python /neodb/manage.py rqworker --with-scheduler fetch crawl + command: /neodb/.venv/bin/python /neodb/manage.py rqworker --with-scheduler fetch crawl ap depends_on: migration: condition: service_completed_successfully @@ -167,7 +173,7 @@ 
services: nginx: <<: *neodb-service user: "root:root" - command: nginx -g 'daemon off;' + command: nginx-start depends_on: takahe-web: condition: service_started diff --git a/journal/views/post.py b/journal/views/post.py index cb9bd0eb..96779d7b 100644 --- a/journal/views/post.py +++ b/journal/views/post.py @@ -3,7 +3,6 @@ from django.shortcuts import get_object_or_404, redirect, render from django.urls import reverse from django.utils.translation import gettext_lazy as _ -from httpx import request from catalog.models import * from common.utils import ( diff --git a/misc/bin/neodb-init b/misc/bin/neodb-init new file mode 100755 index 00000000..60048751 --- /dev/null +++ b/misc/bin/neodb-init @@ -0,0 +1,10 @@ +#!/bin/sh +echo '\033[0;35m====== NeoDB ======\033[0m' +echo Initializing ${NEODB_SITE_NAME} on ${NEODB_SITE_DOMAIN} + +[[ -z "${NEODB_DEBUG}" ]] || echo DEBUG is ON +[[ -z "${NEODB_DEBUG}" ]] || set + +/takahe/.venv/bin/python /takahe/manage.py migrate || exit $? +/neodb/.venv/bin/python /neodb/manage.py migrate || exit $? +/neodb/.venv/bin/python /neodb/manage.py setup || exit $? 
diff --git a/misc/bin/nginx-start b/misc/bin/nginx-start new file mode 100755 index 00000000..9c93c6fe --- /dev/null +++ b/misc/bin/nginx-start @@ -0,0 +1,3 @@ +#!/bin/sh +envsubst '${NEODB_WEB_SERVER} ${TAKAHE_WEB_SERVER}' < /neodb/misc/nginx.conf.d/neodb.conf > /etc/nginx/conf.d/neodb.conf +nginx -g 'daemon off;' diff --git a/misc/nginx.conf.d/neodb.conf b/misc/nginx.conf.d/neodb.conf index 23703612..4293b2a2 100644 --- a/misc/nginx.conf.d/neodb.conf +++ b/misc/nginx.conf.d/neodb.conf @@ -1,11 +1,11 @@ proxy_cache_path /www/cache levels=1:2 keys_zone=takahe:20m inactive=14d max_size=1g; upstream neodb { - server neodb-web:8000; + server ${NEODB_WEB_SERVER}; } upstream takahe { - server takahe-web:8000; + server ${TAKAHE_WEB_SERVER}; } server { @@ -98,7 +98,7 @@ server { proxy_cache_valid any 72h; add_header X-Cache $upstream_cache_status; } - location ~* ^/(@|\.well-known|actor|inbox|nodeinfo|api/v1|api/v2|auth|oauth|tags|settings|media|proxy|admin|djadmin) { + location ~* ^/(@|\.well-known|actor|inbox|api/v1|api/v2|auth|oauth|tags|settings|media|proxy|admin|djadmin) { proxy_pass http://takahe; } location / { diff --git a/neodb-takahe b/neodb-takahe index 4bf7dd6b..af8880f1 160000 --- a/neodb-takahe +++ b/neodb-takahe @@ -1 +1 @@ -Subproject commit 4bf7dd6b6e6594fdfe2df4e9b3b5383d5aea7063 +Subproject commit af8880f1b61556ae83e1f9970ba3ee6bbfa84292 diff --git a/neodb.env.example b/neodb.env.example index ad1f5f6a..8f1343f2 100644 --- a/neodb.env.example +++ b/neodb.env.example @@ -21,3 +21,6 @@ NEODB_SITE_DOMAIN=example.site # Turn on DEBUG mode, either set this to True or don't set it at all # NEODB_DEBUG=True + +# pull NeoDB Docker image from another tag/branch +# TAG=latest From c45e9f60166118663349b87d66e55bcc35cf58dd Mon Sep 17 00:00:00 2001 From: Your Name Date: Sun, 20 Aug 2023 18:27:20 +0000 Subject: [PATCH 13/43] sync site config during boot; customize site logo --- boofilsic/settings.py | 15 +++++++--- common/management/commands/setup.py | 39 
+++++++++++++++++++++++++ common/templates/_header.html | 2 +- common/templates/common_libs.html | 4 +-- docker-compose.yml | 2 ++ neodb.env.example | 8 ++++- requirements.txt | 1 + users/models/apidentity.py | 4 ++- users/templates/users/login.html | 2 +- users/templates/users/register.html | 2 +- users/templates/users/verify_email.html | 2 +- 11 files changed, 69 insertions(+), 12 deletions(-) diff --git a/boofilsic/settings.py b/boofilsic/settings.py index d03d11a8..6b579557 100644 --- a/boofilsic/settings.py +++ b/boofilsic/settings.py @@ -1,14 +1,15 @@ import os -# import django_stubs_ext +import environ -# django_stubs_ext.monkeypatch() +env = environ.Env( + # set casting, default value + DEBUG=(bool, False) +) NEODB_VERSION = "0.8" DATABASE_ROUTERS = ["takahe.db_routes.TakaheRouter"] -PROJECT_ROOT = os.path.abspath(os.path.dirname(__name__)) - # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) @@ -406,3 +407,9 @@ OAUTH2_PROVIDER_APPLICATION_MODEL = "developer.Application" DEVELOPER_CONSOLE_APPLICATION_CLIENT_ID = "NEODB_DEVELOPER_CONSOLE" + +SETUP_ADMIN_USERNAMES = [ + u for u in os.environ.get("NEODB_ADMIN_USERNAMES", "").split(",") if u +] + +SITE_INFO["site_logo"] = os.environ.get("NEODB_SITE_LOGO", "/s/img/logo.svg") diff --git a/common/management/commands/setup.py b/common/management/commands/setup.py index d25b1ecf..7cf88b0e 100644 --- a/common/management/commands/setup.py +++ b/common/management/commands/setup.py @@ -1,15 +1,54 @@ +from django.conf import settings from django.core.management.base import BaseCommand +from loguru import logger from catalog.search.typesense import Indexer +from takahe.models import Config as TakaheConfig +from takahe.models import Domain as TakaheDomain +from takahe.models import Identity as TakaheIdentity +from takahe.models import User as TakaheUser +from users.models import User class Command(BaseCommand): help = 
"Post-Migration Setup" + def sync_site_config(self): + domain = settings.SITE_INFO["site_domain"] + name = settings.SITE_INFO["site_name"] + service_domain = settings.SITE_INFO.get("site_service_domain") + TakaheConfig.objects.update_or_create( + key="site_name", + user=None, + identity=None, + domain=None, + defaults={"json": name}, + ) + TakaheConfig.objects.update_or_create( + key="site_name", + user=None, + identity=None, + domain_id=domain, + defaults={"json": name}, + ) + + def sync_admin_user(self): + users = User.objects.filter(username__in=settings.SETUP_ADMIN_USERNAMES) + for user in users: + if user.is_superuser: + logger.debug(f"User {user.username} is already admin") + else: + user.is_superuser = True + user.save(update_fields=["is_superuser"]) + TakaheUser.objects.filter(email="@" + user.username).update(admin=True) + logger.info(f"Updated user {user.username} as admin") + def handle(self, *args, **options): # Update site name if changed + self.sync_site_config() # Create/update admin user if configured in env + self.sync_admin_user() # Create basic emoji if not exists diff --git a/common/templates/_header.html b/common/templates/_header.html index 62b7c5c6..58418f76 100644 --- a/common/templates/_header.html +++ b/common/templates/_header.html @@ -6,7 +6,7 @@