diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 00000000..386c62ba
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,20 @@
+.DS_Store
+.env
+.venv
+.vscode
+.github
+.gitignore
+.pre-commit-config.yaml
+__pycache__
+/compose.yml
+/compose.override.yml
+/Dockerfile
+/doc
+/media
+/static
+/test_data
+/neodb
+/neodb-takahe/doc
+/neodb-takahe/docker
+/neodb-takahe/static-collected
+/neodb-takahe/takahe/local_settings.py
diff --git a/.github/workflows/lint.yml b/.github/workflows/check.yml
similarity index 98%
rename from .github/workflows/lint.yml
rename to .github/workflows/check.yml
index 19f6bbc8..e88219ad 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/check.yml
@@ -1,4 +1,4 @@
-name: check
+name: code check
 
 on: [push, pull_request]
 
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index a8712436..0223dd9e 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -1,21 +1,24 @@
-name: Deployment
+name: preview deployment
 
 on:
   workflow_run:
-    workflows: ["all tests"]
-    branches: [main]
+    workflows: ["docker image"]
     types:
       - completed
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
 
 jobs:
   deployment:
     if: ${{ github.event.workflow_run.conclusion == 'success' }}
-    name: Deployment
+    name: deployment to preview environment
     runs-on: ubuntu-latest
-    environment: preview
+    environment: ${{ vars.DEPLOY_ENV }}
     steps:
       - name: ssh
         uses: appleboy/ssh-action@master
         with:
           host: ${{ secrets.SSH_HOST }}
+          port: ${{ secrets.SSH_PORT }}
           username: ${{ secrets.SSH_USER }}
           key: ${{ secrets.SSH_KEY }}
           script: ${{ vars.DEPLOY_SCRIPT }}
diff --git a/.github/workflows/django.yml b/.github/workflows/django.yml
deleted file mode 100644
index 1e7a948e..00000000
--- a/.github/workflows/django.yml
+++ /dev/null
@@ -1,44 +0,0 @@
-name: all tests
-
-on:
-  push:
-  pull_request:
-    branches: [ "main" ]
-
-jobs:
-  build:
-
-    runs-on: ubuntu-latest
-    services:
-      redis:
-        image: redis
-        ports:
-          - 6379:6379
-      db:
-        image: postgres:12.13-alpine
-        env:
-          POSTGRES_USER: postgres
-          POSTGRES_PASSWORD: admin123
-          POSTGRES_DB: test
-        ports:
-          - 5432:5432
-        options: --mount type=tmpfs,destination=/var/lib/postgresql/data --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
-
-    strategy:
-      max-parallel: 4
-      matrix:
-        python-version: ['3.10', '3.11']
-
-    steps:
-    - uses: actions/checkout@v3
-    - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v4
-      with:
-        python-version: ${{ matrix.python-version }}
-        cache: pip
-    - name: Install Dependencies
-      run: |
-        pip install -r requirements.txt
-    - name: Run Tests
-      run: |
-        python manage.py test
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
new file mode 100644
index 00000000..aa4d9c1e
--- /dev/null
+++ b/.github/workflows/publish.yml
@@ -0,0 +1,54 @@
+name: docker image
+
+on:
+  workflow_run:
+    workflows: ["unit test"]
+    branches:
+      - main
+      - activitypub
+    types:
+      - completed
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  build_and_push_image:
+    name: build image and push to Docker Hub
+    if: ${{ github.event.workflow_run.conclusion == 'success' }}
+    # if: github.repository_owner == 'neodb-social'
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check out the repo
+        uses: actions/checkout@v3
+        with:
+          submodules: 'true'
+
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+
+      - name: Docker meta
+        id: meta
+        uses: docker/metadata-action@v4
+        with:
+          images: neodb/neodb
+
+      - name: Login to DockerHub
+        if: github.event_name != 'pull_request'
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Build and push
+        uses: docker/build-push-action@v4
+        with:
+          context: .
+          platforms: "linux/amd64,linux/arm64"
+          push: ${{ github.event_name != 'pull_request' }}
+          tags: ${{ steps.meta.outputs.tags }}
+          labels: ${{ steps.meta.outputs.labels }}
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
new file mode 100644
index 00000000..92818cd0
--- /dev/null
+++ b/.github/workflows/tests.yml
@@ -0,0 +1,55 @@
+name: unit test
+
+on:
+  push:
+  pull_request:
+    branches: [ "main" ]
+
+jobs:
+  django:
+    runs-on: ubuntu-latest
+    services:
+      redis:
+        image: redis
+        ports:
+          - 6379:6379
+      db:
+        image: postgres
+        env:
+          POSTGRES_USER: testuser
+          POSTGRES_PASSWORD: testpass
+          POSTGRES_DB: test_neodb
+        ports:
+          - 5432:5432
+      db2:
+        image: postgres
+        env:
+          POSTGRES_USER: testuser
+          POSTGRES_PASSWORD: testpass
+          POSTGRES_DB: test_neodb_takahe
+        ports:
+          - 15432:5432
+    strategy:
+      max-parallel: 4
+      matrix:
+        python-version: ['3.11']
+    steps:
+      - uses: actions/checkout@v3
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+          cache: pip
+      - name: Install Dependencies
+        run: |
+          pip install -r requirements.txt
+      - name: Run Tests
+        env:
+          NEODB_DB_URL: postgres://testuser:testpass@127.0.0.1/test_neodb
+          TAKAHE_DB_URL: postgres://testuser:testpass@127.0.0.1/test_neodb_takahe
+          NEODB_REDIS_URL: redis://127.0.0.1:6379/0
+          NEODB_SITE_NAME: test
+          NEODB_SITE_DOMAIN: test.domain
+          NEODB_SECRET_KEY: test
+        run: |
+          python manage.py test
diff --git a/.gitmodules b/.gitmodules
index c567a1e5..71efbd54 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -2,3 +2,7 @@
 	path = webapp
 	url = https://github.com/neodb-social/webapp.git
 	branch = main
+[submodule "neodb-takahe"]
+	path = neodb-takahe
+	url = https://github.com/neodb-social/neodb-takahe.git
+	branch = neodb
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 2f55a153..119a5f36 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -30,7 +30,6 @@ repos:
     rev: 22.12.0
     hooks:
       - id: black
-        language_version: python3.11
 
   - repo: https://github.com/Riverside-Healthcare/djLint
     rev: v1.32.1
diff --git a/Dockerfile b/Dockerfile
index 8c18f1be..06eeffb6 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,31 +1,58 @@
 # syntax=docker/dockerfile:1
-FROM python:3.11-slim-bullseye
+FROM python:3.11-slim as build
 ENV PYTHONDONTWRITEBYTECODE=1
 ENV PYTHONUNBUFFERED=1
+
+RUN --mount=type=cache,sharing=locked,target=/var/cache/apt apt-get update \
+    && apt-get install -y --no-install-recommends build-essential libpq-dev python3-venv git
+
 COPY . /neodb
+
+RUN echo neodb-`cd /neodb && git rev-parse --short HEAD`-`cd /neodb/neodb-takahe && git rev-parse --short HEAD`-`date -u +%Y%m%d%H%M%S` > /neodb/version
+RUN rm -rf /neodb/.git /neodb/neodb-takahe/.git
+
+RUN mv /neodb/neodb-takahe /takahe
+
 WORKDIR /neodb
-RUN apt-get update \
-    && apt-get install -y --no-install-recommends \
-    build-essential \
-    libpq-dev \
+RUN python -m venv /neodb-venv
+RUN --mount=type=cache,sharing=locked,target=/root/.cache /neodb-venv/bin/python3 -m pip install --upgrade -r requirements.txt
+
+WORKDIR /takahe
+RUN python -m venv /takahe-venv
+RUN --mount=type=cache,sharing=locked,target=/root/.cache /takahe-venv/bin/python3 -m pip install --upgrade -r requirements.txt
+
+# runtime stage
+FROM python:3.11-slim as runtime
+ENV PYTHONDONTWRITEBYTECODE=1
+ENV PYTHONUNBUFFERED=1
+
+RUN --mount=type=cache,sharing=locked,target=/var/cache/apt-run apt-get update \
+    && apt-get install -y --no-install-recommends libpq-dev \
     busybox \
-    postgresql-client \
     nginx \
-    opencc \
-    git
-COPY misc/nginx.conf.d/* /etc/nginx/conf.d/
-RUN echo >> /etc/nginx/nginx.conf
-RUN echo 'daemon off;' >> /etc/nginx/nginx.conf
-RUN python3 -m pip install --no-cache-dir --upgrade -r requirements.txt
-RUN apt-get purge -y --auto-remove \
-    build-essential \
-    libpq-dev \
-    && rm -rf /var/lib/apt/lists/*
-
-RUN python3 manage.py compilescss \
-    && python3 manage.py collectstatic --noinput
-RUN cp -R misc/www /www
-RUN mv static /www/static
-
-# invoke check by default
-CMD [ "python3", "/neodb/manage.py", "check" ]
+    gettext-base
+RUN busybox --install
+
+# postgresql and redis cli are not required, but install for development convenience
+RUN --mount=type=cache,sharing=locked,target=/var/cache/apt-run apt-get install -y --no-install-recommends postgresql-client redis-tools
+RUN useradd -U app
+RUN rm -rf /var/lib/apt/lists/*
+
+COPY --from=build /neodb /neodb
+WORKDIR /neodb
+COPY --from=build /neodb-venv /neodb-venv
+RUN NEODB_SECRET_KEY="t" NEODB_SITE_DOMAIN="x.y" NEODB_SITE_NAME="z" /neodb-venv/bin/python3 manage.py compilescss
+RUN NEODB_SECRET_KEY="t" NEODB_SITE_DOMAIN="x.y" NEODB_SITE_NAME="z" /neodb-venv/bin/python3 manage.py collectstatic --noinput
+
+COPY --from=build /takahe /takahe
+WORKDIR /takahe
+COPY --from=build /takahe-venv /takahe-venv
+RUN TAKAHE_DATABASE_SERVER="postgres://x@y/z" TAKAHE_SECRET_KEY="t" TAKAHE_MAIN_DOMAIN="x.y" /takahe-venv/bin/python3 manage.py collectstatic --noinput
+
+WORKDIR /neodb
+COPY misc/bin/* /bin/
+RUN mkdir -p /www
+
+USER app:app
+
+CMD [ "neodb-hello"]
diff --git a/README.md b/README.md
index b8000fd2..34b4f8b9 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,8 @@
 # Boofilsic/NeoDB
-![Test Status](https://github.com/neodb-social/neodb/actions/workflows/django.yml/badge.svg?branch=main)
+![](https://github.com/neodb-social/neodb/actions/workflows/check.yml/badge.svg?branch=main)
+![](https://github.com/neodb-social/neodb/actions/workflows/tests.yml/badge.svg?branch=main)
+![](https://github.com/neodb-social/neodb/actions/workflows/publish.yml/badge.svg?branch=main)
 
 Boofilsic/NeoDB is an open source project and free service to help users manage, share and discover collections, reviews and ratings for culture products (e.g. books, movies, music, podcasts, games and performances) in Fediverse.
 
@@ -41,7 +43,7 @@
 * eligible items, e.g. podcasts and albums, are playable in feed
 + link Fediverse account and import social graph
 + share collections and reviews to Fediverse ~~and Twitter~~ feed
-+ ActivityPub support is under development, a pre-alpha version is available for developers as [docker image](https://hub.docker.com/r/neodb/neodb)
++ ActivityPub support is under active development
 - Other
 + i18n/language support are planned
diff --git a/boofilsic/settings.py b/boofilsic/settings.py
index 91829e6d..f222f5be 100644
--- a/boofilsic/settings.py
+++ b/boofilsic/settings.py
@@ -1,25 +1,223 @@
 import os
 
-NEODB_VERSION = "0.8"
+import environ
 
-PROJECT_ROOT = os.path.abspath(os.path.dirname(__name__))
+NEODB_VERSION = "0.8"
 
-# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
+# Parse configuration from:
+# - environment variables
+# - .env file in project root directory
+# - /etc/neodb.conf
 BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+environ.Env.read_env("/etc/neodb.conf")
+environ.Env.read_env(os.path.join(BASE_DIR, ".env"))
+
+# ====== List of user configuration variables ======
+env = environ.FileAwareEnv(
+    # WARNING: do not run with debug mode turned on in production
+    NEODB_DEBUG=(bool, True),
+    # WARNING: must use your own key and keep it secret
+    NEODB_SECRET_KEY=(str),
+    # Site information
+    NEODB_SITE_NAME=(str),
+    NEODB_SITE_DOMAIN=(str),
+    NEODB_SITE_LOGO=(str, "/s/img/logo.svg"),
+    NEODB_SITE_ICON=(str, "/s/img/logo.svg"),
+    NEODB_USER_ICON=(str, "/s/img/avatar.svg"),
+    NEODB_SITE_INTRO=(str, ""),
+    # Links in site footer
+    NEODB_SITE_LINKS=(dict, {}),
+    # Invite only mode
+    # when True: user will not be able to register unless with invite token
+    # (generated by `neodb-manage invite --create`)
+    NEODB_INVITE_ONLY=(bool, False),
+    # Mastodon/Pleroma instances allowed to login; keep empty to allow any instance to login
+    NEODB_LOGIN_MASTODON_WHITELIST=(list, []),
+    # DATABASE
+    NEODB_DB_URL=(str, "postgres://user:pass@127.0.0.1:5432/neodb"),
+    # Redis, for cache and job queue
+    NEODB_REDIS_URL=(str, "redis://127.0.0.1:6379/0"),
+    # Search backend, in one of these formats:
+    # typesense://user:insecure@127.0.0.1:8108/catalog
+    NEODB_SEARCH_URL=(str, ""),
+    # EMAIL CONFIGURATION, in one of these formats:
+    # "smtp://<username>:<password>@<host>:<port>"
+    # "smtp+tls://<username>:<password>@<host>:<port>"
+    # "smtp+ssl://<username>:<password>@<host>:<port>"
+    # "anymail://<anymail_backend_name>?<anymail_options>"
+    NEODB_EMAIL_URL=(str, ""),
+    # EMAIL FROM
+    NEODB_EMAIL_FROM=(str, "🧩 NeoDB <no-reply@neodb.net>"),
+    # ADMIN_USERS
+    NEODB_ADMIN_USERNAMES=(list, []),
+    # List of available proxies for proxy downloader, in format of http://server1?url=__URL__,http://s2?url=__URL__,...
+    NEODB_DOWNLOADER_PROXY_LIST=(list, []),
+    # Timeout of downloader requests, in seconds
+    NEODB_DOWNLOADER_REQUEST_TIMEOUT=(int, 90),
+    # Timeout of downloader cache, in seconds
+    NEODB_DOWNLOADER_CACHE_TIMEOUT=(int, 300),
+    # Number of retries of downloader, when site is using RetryDownloader
+    NEODB_DOWNLOADER_RETRIES=(int, 3),
+    # INTEGRATED TAKAHE CONFIGURATION
+    TAKAHE_DB_URL=(str, "postgres://takahe:takahepass@127.0.0.1:5432/takahe"),
+    # Spotify - https://developer.spotify.com/
+    SPOTIFY_API_KEY=(str, "TESTONLY"),
+    # The Movie Database (TMDB) - https://developer.themoviedb.org/
+    TMDB_API_V3_KEY=(str, "TESTONLY"),
+    # Google Books - https://developers.google.com/books/docs/v1/using - not used at the moment
+    GOOGLE_API_KEY=(str, "TESTONLY"),
+    # Discogs - personal access token from https://www.discogs.com/settings/developers
+    DISCOGS_API_KEY=(str, "TESTONLY"),
+    # IGDB - https://api-docs.igdb.com/
+    IGDB_API_CLIENT_ID=(str, "TESTONLY"),
+    IGDB_API_CLIENT_SECRET=(str, ""),
+    # Discord webhooks
+    DISCORD_WEBHOOKS=(dict, {"user-report": None}),
+    # Slack API token, for sending exceptions to Slack, may deprecate in future
+    SLACK_API_TOKEN=(str, ""),
+)
+
+# ====== End of user configuration variables ======
+
+SECRET_KEY = env("NEODB_SECRET_KEY")
+DEBUG = env("NEODB_DEBUG")
+DATABASES = {
+    "default": env.db_url("NEODB_DB_URL"),
+    "takahe": env.db_url("TAKAHE_DB_URL"),
+}
+DATABASES["default"]["OPTIONS"] = {"client_encoding": "UTF8"}
+DATABASES["default"]["TEST"] = {"DEPENDENCIES": ["takahe"]}
+DATABASES["takahe"]["OPTIONS"] = {"client_encoding": "UTF8"}
+DATABASES["takahe"]["TEST"] = {"DEPENDENCIES": []}
+CACHES = {"default": env.cache_url("NEODB_REDIS_URL")}
 
-DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
-# for legacy deployment:
-# DEFAULT_AUTO_FIELD = "django.db.models.AutoField"
+_parsed_redis_url = env.url("NEODB_REDIS_URL")
+RQ_QUEUES = {
+    q: {
+        "HOST": _parsed_redis_url.hostname,
+        "PORT": _parsed_redis_url.port,
+        "DB": _parsed_redis_url.path[1:],
+        "DEFAULT_TIMEOUT": -1,
+    }
+    for q in ["mastodon", "export", "import", "fetch", "crawl", "ap", "cron"]
+}
+
+_parsed_search_url = env.url("NEODB_SEARCH_URL")
+SEARCH_BACKEND = None
+TYPESENSE_CONNECTION = {}
+if _parsed_search_url.scheme == "typesense":
+    SEARCH_BACKEND = "TYPESENSE"
+    TYPESENSE_CONNECTION = {
+        "api_key": _parsed_search_url.password,
+        "nodes": [
+            {
+                "host": _parsed_search_url.hostname,
+                "port": _parsed_search_url.port,
+                "protocol": "http",
+            }
+        ],
+        "connection_timeout_seconds": 2,
+    }
+    TYPESENSE_INDEX_NAME = _parsed_search_url.path[1:]
+# elif _parsed_search_url.scheme == "meilisearch":
+#     SEARCH_BACKEND = 'MEILISEARCH'
+#     MEILISEARCH_SERVER = 'http://127.0.0.1:7700'
+#     MEILISEARCH_KEY = _parsed_search_url.password
+
+DEFAULT_FROM_EMAIL = env("NEODB_EMAIL_FROM")
+_parsed_email_url = env.url("NEODB_EMAIL_URL")
+if _parsed_email_url.scheme == "anymail":
+    # "anymail://<anymail_backend_name>?<anymail_options>"
+    # see https://anymail.dev/
+    from urllib import parse
+
+    EMAIL_BACKEND = _parsed_email_url.hostname
+    ANYMAIL = dict(parse.parse_qsl(_parsed_email_url.query))
+elif _parsed_email_url.scheme:
+    _parsed_email_config = env.email("NEODB_EMAIL_URL")
+    EMAIL_TIMEOUT = 5
+    vars().update(_parsed_email_config)
+
+SITE_DOMAIN = env("NEODB_SITE_DOMAIN")
+SITE_INFO = {
+    "site_name": env("NEODB_SITE_NAME"),
+    "site_domain": SITE_DOMAIN,
+    "site_url": env("NEODB_SITE_URL", default="https://" + SITE_DOMAIN),
+    "site_logo": env("NEODB_SITE_LOGO"),
+    "site_icon": env("NEODB_SITE_ICON"),
+    "user_icon": env("NEODB_USER_ICON"),
+    "site_intro": env("NEODB_SITE_INTRO"),
+    "site_links": [{"title": k, "url": v} for k, v in env("NEODB_SITE_LINKS").items()],
+}
+
+SETUP_ADMIN_USERNAMES = env("NEODB_ADMIN_USERNAMES")
+
+INVITE_ONLY = env("NEODB_INVITE_ONLY")
+
+# By default, NeoDB will relay with relay.neodb.net so that public user ratings/etc can be shared across instances
+# If you are running a development server, set this to True to disable this behavior
+DISABLE_DEFAULT_RELAY = env("NEODB_DISABLE_DEFAULT_RELAY", default=DEBUG)
+
+MASTODON_ALLOWED_SITES = env("NEODB_LOGIN_MASTODON_WHITELIST")
+
+# Allow user to create account with email (and link to Mastodon account later)
+ALLOW_EMAIL_ONLY_ACCOUNT = env.bool(
+    "NEODB_LOGIN_ENABLE_EMAIL_ONLY",
+    default=(_parsed_email_url.scheme and len(MASTODON_ALLOWED_SITES) == 0),  # type: ignore
+)
+
+# Allow user to login via any Mastodon/Pleroma sites
+MASTODON_ALLOW_ANY_SITE = len(MASTODON_ALLOWED_SITES) == 0
+
+REDIRECT_URIS = env(
+    "NEODB_LOGIN_MASTODON_REDIRECT_URI",
+    default=SITE_INFO["site_url"] + "/account/login/oauth",
+)
+# for sites migrated from previous version, either wipe mastodon client ids or use:
+# REDIRECT_URIS = f'{SITE_INFO["site_url"]}/users/OAuth2_login/'
+
+# Timeout of requests to Mastodon, in seconds
+MASTODON_TIMEOUT = env("NEODB_LOGIN_MASTODON_TIMEOUT", default=10)  # type: ignore
 
-# Quick-start development settings - unsuitable for production
-# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
+# Scope when creating Mastodon apps
+# Alternatively, use "read write follow" to avoid re-authorization when migrating to a future version with more features
+MASTODON_CLIENT_SCOPE = env(
+    "NEODB_MASTODON_CLIENT_SCOPE",
+    default="read:accounts read:follows read:search read:blocks read:mutes write:statuses write:media",  # type: ignore
+)
 
-# SECURITY WARNING: use your own secret key and keep it!
-SECRET_KEY = os.environ.get("NEODB_SECRET_KEY", "insecure")
+# some Mastodon-compatible software like Pixelfed does not support granular scopes
+MASTODON_LEGACY_CLIENT_SCOPE = "read write follow"
+
+# Emoji code in mastodon
+STAR_SOLID = ":star_solid:"
+STAR_HALF = ":star_half:"
+STAR_EMPTY = ":star_empty:"
+
+DISCORD_WEBHOOKS = env("DISCORD_WEBHOOKS")
+SPOTIFY_CREDENTIAL = env("SPOTIFY_API_KEY")
+TMDB_API3_KEY = env("TMDB_API_V3_KEY")
+# TMDB_API4_KEY = env('TMDB_API_V4_KEY')
+# GOOGLE_API_KEY = env('GOOGLE_API_KEY')
+DISCOGS_API_KEY = env("DISCOGS_API_KEY")
+IGDB_CLIENT_ID = env("IGDB_API_CLIENT_ID")
+IGDB_CLIENT_SECRET = env("IGDB_API_CLIENT_SECRET")
+SLACK_TOKEN = env("SLACK_API_TOKEN")
+SLACK_CHANNEL = "alert"
+
+DOWNLOADER_PROXY_LIST = env("NEODB_DOWNLOADER_PROXY_LIST")
+DOWNLOADER_BACKUP_PROXY = env("NEODB_DOWNLOADER_BACKUP_PROXY", default="")  # type: ignore
+DOWNLOADER_REQUEST_TIMEOUT = env("NEODB_DOWNLOADER_REQUEST_TIMEOUT")
+DOWNLOADER_CACHE_TIMEOUT = env("NEODB_DOWNLOADER_CACHE_TIMEOUT")
+DOWNLOADER_RETRIES = env("NEODB_DOWNLOADER_RETRIES")
+# ====== USER CONFIGURATION END ======
 
-# SECURITY WARNING: don't run with debug turned on in production!
-DEBUG = os.environ.get("NEODB_DEBUG", "") != ""
+DATABASE_ROUTERS = ["takahe.db_routes.TakaheRouter"]
+
+DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
+# for legacy deployment:
+# DEFAULT_AUTO_FIELD = "django.db.models.AutoField"
 
 ALLOWED_HOSTS = ["*"]
 
@@ -53,7 +251,7 @@
     "user_messages",
     "fontawesomefree",
     "corsheaders",
-    # "anymail",
+    "anymail",
     # "silk",
 ]
 
@@ -66,6 +264,7 @@
     "journal.apps.JournalConfig",
     "social.apps.SocialConfig",
    "developer.apps.DeveloperConfig",
+    "takahe.apps.TakaheConfig",
     "legacy.apps.LegacyConfig",
 ]
 
@@ -112,48 +311,42 @@
 WSGI_APPLICATION = "boofilsic.wsgi.application"
 
-CACHES = {
-    "default": {
-        "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
-    }
-}
-
-# Database
-# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
-
-DATABASES = {
-    "default": {
-        "ENGINE": "django.db.backends.postgresql",
-        "NAME": os.environ.get("NEODB_DB_NAME", "test"),
-        "USER": os.environ.get("NEODB_DB_USER", "postgres"),
-        "PASSWORD": os.environ.get("NEODB_DB_PASSWORD", "admin123"),
-        "HOST": os.environ.get("NEODB_DB_HOST", "127.0.0.1"),
-        "PORT": int(os.environ.get("NEODB_DB_PORT", 5432)),
-        "OPTIONS": {
-            "client_encoding": "UTF8",
-            # 'isolation_level': psycopg2.extensions.ISOLATION_LEVEL_DEFAULT,
-        },
-    }
-}
-
-# Customized auth backend, glue OAuth2 and Django User model together
-# https://docs.djangoproject.com/en/3.0/topics/auth/customizing/#authentication-backends
+SESSION_COOKIE_NAME = "neodbsid"
 
 AUTHENTICATION_BACKENDS = [
     "mastodon.auth.OAuth2Backend",
     "oauth2_provider.backends.OAuth2Backend",
 ]
 
+LOGGING = {
+    "version": 1,
+    "disable_existing_loggers": False,
+    "handlers": {
+        "console": {"class": "logging.StreamHandler"},
+    },
+    "loggers": {
+        "": {
+            "handlers": ["console"],
+            "level": env("NEODB_LOG_LEVEL", default="DEBUG" if DEBUG else "INFO"),  # type: ignore
+        },
+    },
+}
 
-MARKDOWNX_MARKDOWNIFY_FUNCTION = "journal.models.render_md"
+if SLACK_TOKEN:
+    LOGGING["handlers"]["slack"] = {
+        "level": "ERROR",
+        "class": "django_slack.log.SlackExceptionHandler",
+    }
+    LOGGING["loggers"]["django"] = {"handlers": ["slack"], "level": "ERROR"}
 
+MARKDOWNX_MARKDOWNIFY_FUNCTION = "journal.models.render_md"
 
 # Internationalization
 # https://docs.djangoproject.com/en/3.0/topics/i18n/
 
 LANGUAGE_CODE = "zh-hans"
 
-TIME_ZONE = "Asia/Shanghai"
+TIME_ZONE = env("NEODB_TIMEZONE", default="Asia/Shanghai")  # type: ignore
 
 USE_I18N = True
 
@@ -161,26 +354,27 @@
 USE_TZ = True
 
-
 USE_X_FORWARDED_HOST = True
 SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
 
 DATA_UPLOAD_MAX_MEMORY_SIZE = 100 * 1024 * 1024
 CSRF_COOKIE_SECURE = True
 SESSION_COOKIE_SECURE = True
-if os.getenv("NEODB_SSL", "") != "":
+if env("NEODB_SSL", default="") != "":  # type: ignore
+    # FIXME: remove this since user may enforce SSL in reverse proxy
     SECURE_SSL_REDIRECT = True
     SECURE_HSTS_PRELOAD = True
     SECURE_HSTS_INCLUDE_SUBDOMAINS = True
     SECURE_HSTS_SECONDS = 31536000
 
-# Static files (CSS, JavaScript, Images)
-# https://docs.djangoproject.com/en/3.0/howto/static-files/
+STATIC_URL = "/s/"
+STATIC_ROOT = env("NEODB_STATIC_ROOT", default=os.path.join(BASE_DIR, "static/"))  # type: ignore
 
-STATIC_URL = "/static/"
-STATIC_ROOT = os.environ.get("NEODB_STATIC_ROOT", os.path.join(BASE_DIR, "static/"))
+if DEBUG:
+    # django-sass-processor will generate neodb.css on-the-fly when DEBUG
+    # NEODB_STATIC_ROOT is readonly in docker mode, so we give it a writable place
+    SASS_PROCESSOR_ROOT = "/tmp"
 
-STATICFILES_STORAGE = "django.contrib.staticfiles.storage.ManifestStaticFilesStorage"
 STATICFILES_FINDERS = [
     "django.contrib.staticfiles.finders.FileSystemFinder",
     "django.contrib.staticfiles.finders.AppDirectoriesFinder",
@@ -191,25 +385,35 @@
 SILENCED_SYSTEM_CHECKS = [
     "admin.E404",  # Required by django-user-messages
+    "models.W035",  # Required by takahe: identical table name in different database
+    "fields.W344",  # Required by takahe: identical table name in different database
 ]
 
-MEDIA_URL = "/media/"
-MEDIA_ROOT = os.environ.get("NEODB_MEDIA_ROOT", os.path.join(BASE_DIR, "media/"))
+MEDIA_URL = "/m/"
+MEDIA_ROOT = env("NEODB_MEDIA_ROOT", default=os.path.join(BASE_DIR, "media"))  # type: ignore
 
-SITE_DOMAIN = os.environ.get("NEODB_SITE_DOMAIN", "nicedb.org")
-SITE_INFO = {
-    "site_name": os.environ.get("NEODB_SITE_NAME", "NiceDB"),
-    "site_domain": SITE_DOMAIN,
-    "site_url": os.environ.get("NEODB_SITE_URL", "https://" + SITE_DOMAIN),
-    "support_link": "https://github.com/doubaniux/boofilsic/issues",
-    "social_link": "https://donotban.com/@testie",
-    "donation_link": "https://patreon.com/tertius",
-    "settings_module": os.getenv("DJANGO_SETTINGS_MODULE"),
+TAKAHE_MEDIA_URL = env("TAKAHE_MEDIA_URL", default="/media/")  # type: ignore
+TAKAHE_MEDIA_ROOT = env("TAKAHE_MEDIA_ROOT", default="media")  # type: ignore
+
+STORAGES = {  # TODO: support S3
+    "default": {
+        "BACKEND": "django.core.files.storage.FileSystemStorage",
+    },
+    "staticfiles": {
+        "BACKEND": "django.contrib.staticfiles.storage.ManifestStaticFilesStorage",
+    },
+    "takahe": {
+        "BACKEND": "django.core.files.storage.FileSystemStorage",
+        "OPTIONS": {
+            "location": TAKAHE_MEDIA_ROOT,
+            "base_url": TAKAHE_MEDIA_URL,
+        },
+    },
 }
 
-REDIRECT_URIS = SITE_INFO["site_url"] + "/account/login/oauth"
-# for sites migrated from previous version, either wipe mastodon client ids or use:
-# REDIRECT_URIS = f'{SITE_INFO["site_url"]}/users/OAuth2_login/'
+CSRF_TRUSTED_ORIGINS = [SITE_INFO["site_url"]]
+if DEBUG:
+    CSRF_TRUSTED_ORIGINS += ["http://127.0.0.1:8000", "http://localhost:8000"]
 
 # Path to save report related images, ends with slash
 REPORT_MEDIA_PATH_ROOT = "report/"
@@ -229,59 +433,11 @@
 SYNC_FILE_PATH_ROOT = "sync/"
 EXPORT_FILE_PATH_ROOT = "export/"
 
-# Allow user to login via any Mastodon/Pleroma sites
-MASTODON_ALLOW_ANY_SITE = True
-
-# Allow user to create account with email (and link to Mastodon account later)
-ALLOW_EMAIL_ONLY_ACCOUNT = False
-
-# Timeout of requests to Mastodon, in seconds
-MASTODON_TIMEOUT = 30
-
-MASTODON_CLIENT_SCOPE = "read write follow"
-# use the following if it's a new site
-# MASTODON_CLIENT_SCOPE = 'read:accounts read:follows read:search read:blocks read:mutes write:statuses write:media'
-
-MASTODON_LEGACY_CLIENT_SCOPE = "read write follow"
-
-# Emoji code in mastodon
-STAR_SOLID = ":star_solid:"
-STAR_HALF = ":star_half:"
-STAR_EMPTY = ":star_empty:"
-
 # Default redirect loaction when access login required view
 LOGIN_URL = "/account/login"
 
-# Admin site root url
-ADMIN_URL = "tertqX7256n7ej8nbv5cwvsegdse6w7ne5rHd"
-
-SCRAPING_TIMEOUT = 90
-
-# ScraperAPI api key
-SCRAPERAPI_KEY = "***REMOVED***"
-PROXYCRAWL_KEY = None
-SCRAPESTACK_KEY = None
-
-# Spotify credentials
-SPOTIFY_CREDENTIAL = "***REMOVED***"
-
-# IMDb API service https://imdb-api.com/
-IMDB_API_KEY = "***REMOVED***"
-
-# The Movie Database (TMDB) API Keys
-TMDB_API3_KEY = "***REMOVED***"
-# TMDB_API4_KEY = "deadbeef.deadbeef.deadbeef"
-
-# Google Books API Key
-GOOGLE_API_KEY = "***REMOVED***"
-
-# Discogs API Key
-# How to get: a personal access token from https://www.discogs.com/settings/developers
-DISCOGS_API_KEY = "***REMOVED***"
-
-# IGDB
-IGDB_CLIENT_ID = "deadbeef"
-IGDB_CLIENT_SECRET = ""
+ADMIN_ENABLED = DEBUG
+ADMIN_URL = "neodb-admin"
 
 BLEACH_STRIP_COMMENTS = True
 BLEACH_STRIP_TAGS = True
@@ -298,83 +454,16 @@
     },
 }
 # THUMBNAIL_PRESERVE_EXTENSIONS = ('svg',)
-if DEBUG:
-    THUMBNAIL_DEBUG = True
+THUMBNAIL_DEBUG = DEBUG
 
-# https://django-debug-toolbar.readthedocs.io/en/latest/
-# maybe benchmarking before deployment
+DJANGO_REDIS_IGNORE_EXCEPTIONS = not DEBUG
 
-REDIS_HOST = os.environ.get("NEODB_REDIS_HOST", "127.0.0.1")
-REDIS_PORT = int(os.environ.get("NEODB_REDIS_PORT", 6379))
-REDIS_DB = int(os.environ.get("NEODB_REDIS_DB", 0))
-
-RQ_QUEUES = {
-    "mastodon": {
-        "HOST": REDIS_HOST,
-        "PORT": REDIS_PORT,
-        "DB": REDIS_DB,
-        "DEFAULT_TIMEOUT": -1,
-    },
-    "export": {
-        "HOST": REDIS_HOST,
-        "PORT": REDIS_PORT,
-        "DB": REDIS_DB,
-        "DEFAULT_TIMEOUT": -1,
-    },
-    "import": {
-        "HOST": REDIS_HOST,
-        "PORT": REDIS_PORT,
-        "DB": REDIS_DB,
-        "DEFAULT_TIMEOUT": -1,
-    },
-    "fetch": {
-        "HOST": REDIS_HOST,
-        "PORT": REDIS_PORT,
-        "DB": REDIS_DB,
-        "DEFAULT_TIMEOUT": -1,
-    },
-    "crawl": {
-        "HOST": REDIS_HOST,
-        "PORT": REDIS_PORT,
-        "DB": REDIS_DB,
-        "DEFAULT_TIMEOUT": -1,
-    },
-    "doufen": {
-        "HOST": REDIS_HOST,
-        "PORT": REDIS_PORT,
-        "DB": REDIS_DB,
-        "DEFAULT_TIMEOUT": -1,
-    },
-}
-
-RQ_SHOW_ADMIN_LINK = True
+RQ_SHOW_ADMIN_LINK = DEBUG
 
 SEARCH_INDEX_NEW_ONLY = False
 
-SEARCH_BACKEND = None
-
-# SEARCH_BACKEND = 'MEILISEARCH'
-# MEILISEARCH_SERVER = 'http://127.0.0.1:7700'
-# MEILISEARCH_KEY = 'deadbeef'
+DOWNLOADER_SAVEDIR = env("NEODB_DOWNLOADER_SAVE_DIR", default=None)  # type: ignore
 
-if os.environ.get("NEODB_TYPESENSE_ENABLE", ""):
-    SEARCH_BACKEND = "TYPESENSE"
-
-TYPESENSE_CONNECTION = {
-    "api_key": os.environ.get("NEODB_TYPESENSE_KEY", "insecure"),
-    "nodes": [
-        {
-            "host": os.environ.get("NEODB_TYPESENSE_HOST", "127.0.0.1"),
-            "port": os.environ.get("NEODB_TYPESENSE_PORT", "8108"),
-            "protocol": "http",
-        }
-    ],
-    "connection_timeout_seconds": 2,
-}
-
-
-DOWNLOADER_RETRIES = 3
-DOWNLOADER_SAVEDIR = None
 DISABLE_MODEL_SIGNAL = False  # disable index and social feeds during importing/etc
 # MAINTENANCE_MODE = False
@@ -389,9 +478,6 @@
 # SILKY_MAX_RESPONSE_BODY_SIZE = 1024  # If response body>1024 bytes, ignore
 # SILKY_INTERCEPT_PERCENT = 10
 
-DISCORD_WEBHOOKS = {"user-report": None}
-
-
 NINJA_PAGINATION_PER_PAGE = 20
 OAUTH2_PROVIDER = {
     "ACCESS_TOKEN_EXPIRE_SECONDS": 3600 * 24 * 365,
@@ -413,4 +499,5 @@
     # "PATCH",
     "POST",
     # "PUT",
-)
\ No newline at end of file
+)
+DEFAULT_RELAY_SERVER = "https://relay.neodb.net/actor"
diff --git a/catalog/book/models.py b/catalog/book/models.py
index 5be14f2d..d451b9bf 100644
--- a/catalog/book/models.py
+++ b/catalog/book/models.py
@@ -166,7 +166,7 @@ def update_linked_items_from_external_resource(self, resource):
         """add Work from resource.metadata['work'] if not yet"""
         links = resource.required_resources + resource.related_resources
         for w in links:
-            if w["model"] == "Work":
+            if w.get("model") == "Work":
                 work = Work.objects.filter(
                     primary_lookup_id_type=w["id_type"],
                     primary_lookup_id_value=w["id_value"],
diff --git a/catalog/common/__init__.py b/catalog/common/__init__.py
index 51f055af..fe04dde8 100644
--- a/catalog/common/__init__.py
+++ b/catalog/common/__init__.py
@@ -24,6 +24,7 @@
     "use_local_response",
     "RetryDownloader",
     "BasicDownloader",
+    "CachedDownloader",
     "ProxiedDownloader",
     "BasicImageDownloader",
     "ProxiedImageDownloader",
diff --git a/catalog/common/downloaders.py b/catalog/common/downloaders.py
index a579a1e8..0ecbb208 100644
--- a/catalog/common/downloaders.py
+++ b/catalog/common/downloaders.py
@@ -10,6 +10,7 @@
 import filetype
 import requests
 from django.conf import settings
+from django.core.cache import cache
 from lxml import html
 from PIL import Image
 from requests import Response
@@ -140,7 +141,7 @@ def __init__(self, url, headers=None):
         self.headers = headers
 
     def get_timeout(self):
-        return settings.SCRAPING_TIMEOUT
+        return settings.DOWNLOADER_REQUEST_TIMEOUT
 
     def validate_response(self, response):
         if response is None:
@@ -153,7 +154,6 @@
     def _download(self, url) -> Tuple[DownloaderResponse | MockResponse, int]:
         try:
             if not _mock_mode:
-                # TODO cache = get/set from redis
                 resp = requests.get(
                     url, headers=self.headers, timeout=self.get_timeout()
                 )
@@ -191,26 +191,17 @@
 
 class ProxiedDownloader(BasicDownloader):
     def get_proxied_urls(self):
+        if not settings.DOWNLOADER_PROXY_LIST:
+            return [self.url]
         urls = []
-        if settings.SCRAPESTACK_KEY is not None:
-            # urls.append(f'http://api.scrapestack.com/scrape?access_key={settings.SCRAPESTACK_KEY}&url={self.url}')
-            urls.append(
-                f"http://api.scrapestack.com/scrape?keep_headers=1&access_key={settings.SCRAPESTACK_KEY}&url={quote(self.url)}"
-            )
-        if settings.PROXYCRAWL_KEY is not None:
-            urls.append(
-                f"https://api.proxycrawl.com/?token={settings.PROXYCRAWL_KEY}&url={quote(self.url)}"
-            )
-        if settings.SCRAPERAPI_KEY is not None:
-            urls.append(
-                f"http://api.scraperapi.com/?api_key={settings.SCRAPERAPI_KEY}&url={quote(self.url)}"
-            )
+        for p in settings.DOWNLOADER_PROXY_LIST:
+            urls.append(p.replace("__URL__", quote(self.url)))
         return urls
 
     def get_special_proxied_url(self):
         return (
-            f"{settings.LOCAL_PROXY}?url={quote(self.url)}"
-            if settings.LOCAL_PROXY is not None
+            settings.DOWNLOADER_BACKUP_PROXY.replace("__URL__", quote(self.url))
+            if settings.DOWNLOADER_BACKUP_PROXY
             else None
         )
 
@@ -256,6 +247,19 @@ def download(self):
         raise DownloadError(self, "max out of retries")
 
 
+class CachedDownloader(BasicDownloader):
+    def download(self):
+        cache_key = "dl:" + self.url
+        resp = cache.get(cache_key)
+        if resp:
+            self.response_type = RESPONSE_OK
+        else:
+            resp = super().download()
+            if self.response_type == RESPONSE_OK:
+                cache.set(cache_key, resp, timeout=settings.DOWNLOADER_CACHE_TIMEOUT)
+        return resp
+
+
 class ImageDownloaderMixin:
     def __init__(self, url, referer=None):
         self.extention = None
diff --git a/catalog/common/models.py b/catalog/common/models.py
index e0818efa..949909e8 100644
--- a/catalog/common/models.py
+++ b/catalog/common/models.py
@@ -13,7 +13,7 @@
 from django.utils import timezone
 from django.utils.baseconv import base62
 from django.utils.translation import gettext_lazy as _
-from ninja import Schema
+from ninja import Field, Schema
 from polymorphic.models import PolymorphicModel
 
 from catalog.common import jsondata
@@ -46,6 +46,7 @@ class SiteName(models.TextChoices):
     RSS = "rss", _("RSS")
     Discogs = "discogs", _("Discogs")
     AppleMusic = "apple_music", _("苹果音乐")
+    Fediverse = "fedi", _("联邦实例")
 
 
 class IdType(models.TextChoices):
@@ -90,6 +91,7 @@ class IdType(models.TextChoices):
     Bangumi = "bangumi", _("Bangumi")
     ApplePodcast = "apple_podcast", _("苹果播客")
     AppleMusic = "apple_music", _("苹果音乐")
+    Fediverse = "fedi", _("联邦实例")
 
 
 IdealIdTypes = [
@@ -225,6 +227,8 @@ class ExternalResourceSchema(Schema):
 
 
 class BaseSchema(Schema):
+    id: str = Field(alias="absolute_url")
+    type: str = Field(alias="ap_object_type")
     uuid: str
     url: str
     api_url: str
@@ -250,7 +254,7 @@ class Item(SoftDeleteMixin, PolymorphicModel):
     url_path = "item"  # subclass must specify this
     type = None  # subclass must specify this
     parent_class = None  # subclass may specify this to allow create child item
-    category: ItemCategory | None = None  # subclass must specify this
+    category: ItemCategory  # subclass must specify this
     demonstrative: "_StrOrPromise | None" = None  # subclass must specify this
     uid = models.UUIDField(default=uuid.uuid4, editable=False, db_index=True)
     title = models.CharField(_("标题"), max_length=1000, default="")
@@ -345,8 +349,27 @@ def set_parent_item(self, value):
     @property
     def parent_uuid(self):
         return self.parent_item.uuid if self.parent_item else None
 
+    @classmethod
+    def get_ap_object_type(cls):
+        return cls.__name__
+
+    @property
+    def ap_object_type(self):
+        return self.get_ap_object_type()
+
+    @property
+    def ap_object_ref(self):
+        o = {
+            "type": self.get_ap_object_type(),
+            "url": self.absolute_url,
+            "name": self.title,
+        }
+        if self.has_cover():
+            o["image"] = self.cover_image_url
+        return o
+
     def log_action(self, changes):
-        LogEntry.objects.log_create(
+        LogEntry.objects.log_create(  # type: ignore
             self, action=LogEntry.Action.UPDATE, changes=changes
         )
@@ -561,10 +584,13 @@ class ExternalResource(models.Model):
     edited_time = models.DateTimeField(auto_now=True)
     required_resources = jsondata.ArrayField(
         models.CharField(), null=False, blank=False, default=list
-    )
+    )  # links required to generate Item from this resource, e.g. parent TVShow of TVSeason
     related_resources = jsondata.ArrayField(
         models.CharField(), null=False, blank=False, default=list
-    )
+    )  # links related to this resource which may be fetched later, e.g. sub TVSeason of TVShow
+    prematched_resources = jsondata.ArrayField(
+        models.CharField(), null=False, blank=False, default=list
+    )  # links to help match an existing Item from this resource
 
     class Meta:
         unique_together = [["id_type", "id_value"]]
@@ -585,13 +611,24 @@ def get_site(self):
         return SiteManager.get_site_cls_by_id_type(self.id_type)
 
     @property
-    def site_name(self):
+    def site_name(self) -> SiteName:
         try:
-            return self.get_site().SITE_NAME
+            site = self.get_site()
+            return site.SITE_NAME if site else SiteName.Unknown
         except:
             _logger.warning(f"Unknown site for {self}")
             return SiteName.Unknown
 
+    @property
+    def site_label(self):
+        if self.id_type == IdType.Fediverse:
+            from takahe.utils import Takahe
+
+            domain = self.id_value.split("://")[1].split("/")[0]
+            n = Takahe.get_node_name_for_domain(domain)
+            return n or domain
+        return self.site_name.label
+
     def update_content(self, resource_content):
         self.other_lookup_ids = resource_content.lookup_ids
         self.metadata = resource_content.metadata
@@ -615,7 +652,16 @@ def get_all_lookup_ids(self):
         d = {k: v for k, v in d.items() if bool(v)}
         return d
 
-    def get_preferred_model(self) -> type[Item] | None:
+    def get_lookup_ids(self, default_model):
+        lookup_ids = self.get_all_lookup_ids()
+        model = self.get_item_model(default_model)
+        bt, bv = model.get_best_lookup_id(lookup_ids)
+        ids = [(t, v) for t, v in lookup_ids.items() if t and v and t != bt]
+        if bt and bv:
+            ids = [(bt, bv)] + ids
+        return ids
+
+    def get_item_model(self, default_model: type[Item]) -> type[Item]:
         model = self.metadata.get("preferred_model")
         if model:
             m = ContentType.objects.filter(
@@ -625,7 +671,7 @@
                 return cast(Item, m).model_class()
             else:
                 raise ValueError(f"preferred model {model} does not exist")
-        return None
+        return default_model
 
 
 _CONTENT_TYPE_LIST = None
diff --git a/catalog/common/sites.py b/catalog/common/sites.py
index d6b6f11e..1777864b 100644
--- a/catalog/common/sites.py
+++ b/catalog/common/sites.py
@@ -39,7 +39,7 @@ class AbstractSite:
     """
     Abstract class to represent a site
     """
 
-    SITE_NAME: SiteName | None = None
+    SITE_NAME: SiteName
     ID_TYPE: IdType | None = None
     WIKI_PROPERTY_ID: str | None = "P0undefined0"
     DEFAULT_MODEL: Type[Item] | None = None
@@ -104,18 +104,29 @@ def query_str(content, query: str) -> str:
         return content.xpath(query)[0].strip()
 
     @classmethod
-    def get_model_for_resource(cls, resource):
-        model = resource.get_preferred_model()
-        return model or cls.DEFAULT_MODEL
+    def match_existing_item_for_resource(
+        cls, resource: ExternalResource
+    ) -> Item | None:
+        """
+        try match an existing Item for a given ExternalResource
 
-    @classmethod
-    def match_existing_item_for_resource(cls, resource) -> Item | None:
-        model = cls.get_model_for_resource(resource)
+        order of matching:
+        1. look for other ExternalResource by url in prematched_resources, if found, return the item
+        2. look for Item by primary_lookup_id_type and primary_lookup_id_value
+
+        """
+        for resource_link in resource.prematched_resources:  # type: ignore
+            url = resource_link.get("url")
+            if url:
+                matched_resource = ExternalResource.objects.filter(url=url).first()
+                if matched_resource and matched_resource.item:
+                    return matched_resource.item
+        model = resource.get_item_model(cls.DEFAULT_MODEL)
         if not model:
             return None
-        t, v = model.get_best_lookup_id(resource.get_all_lookup_ids())
-        matched = None
-        if t is not None:
+        ids = resource.get_lookup_ids(cls.DEFAULT_MODEL)
+        for t, v in ids:
+            matched = None
             matched = model.objects.filter(
                 primary_lookup_id_type=t,
                 primary_lookup_id_value=v,
@@ -143,14 +154,15 @@
                     matched.primary_lookup_id_type = t
                     matched.primary_lookup_id_value = v
                     matched.save()
-        return matched
+            if matched:
+                return matched
 
     @classmethod
     def match_or_create_item_for_resource(cls, resource):
         previous_item = resource.item
         resource.item = cls.match_existing_item_for_resource(resource) or previous_item
         if resource.item is None:
-            model = cls.get_model_for_resource(resource)
+            model = resource.get_item_model(cls.DEFAULT_MODEL)
             if not model:
                 return None
             t, v = model.get_best_lookup_id(resource.get_all_lookup_ids())
@@ -243,7 +255,7 @@ def get_resource_ready(
                 )
             else:
                 _logger.error(f"unable to get site for {linked_url}")
-        if p.related_resources:
+        if p.related_resources or p.prematched_resources:
             django_rq.get_queue("crawl").enqueue(crawl_related_resources_task, p.pk)
         if p.item:
             p.item.update_linked_items_from_external_resource(p)
@@ -318,7 +330,7 @@ def crawl_related_resources_task(resource_pk):
     if not resource:
         _logger.warn(f"crawl resource not found {resource_pk}")
         return
-    links = resource.related_resources
+    links = (resource.related_resources or []) + (resource.prematched_resources or [])  # type: ignore
     for w in links:  # type: ignore
         try:
             item = None
diff --git a/catalog/common/utils.py b/catalog/common/utils.py
index 0882af5d..08023c09 100644
--- a/catalog/common/utils.py
+++ b/catalog/common/utils.py
@@ -36,4 +36,4 @@ def piece_cover_path(item, filename):
         + "."
         + filename.split(".")[-1]
     )
-    return f"user/{item.owner_id}/{fn}"
+    return f"user/{item.owner_id or '_'}/{fn}"
diff --git a/catalog/jobs/__init__.py b/catalog/jobs/__init__.py
new file mode 100644
index 00000000..a2ebd698
--- /dev/null
+++ b/catalog/jobs/__init__.py
@@ -0,0 +1,2 @@
+from .discover import DiscoverGenerator
+from .podcast import PodcastUpdater
diff --git a/catalog/jobs/discover.py b/catalog/jobs/discover.py
new file mode 100644
index 00000000..5abfe6cf
--- /dev/null
+++ b/catalog/jobs/discover.py
@@ -0,0 +1,95 @@
+from datetime import timedelta
+
+from django.core.cache import cache
+from django.db.models import Count, F
+from django.utils import timezone
+from loguru import logger
+
+from catalog.models import *
+from common.models import BaseJob, JobManager
+from journal.models import Comment, ShelfMember, q_item_in_category
+
+MAX_ITEMS_PER_PERIOD = 12
+MIN_MARKS = 1
+MAX_DAYS_FOR_PERIOD = 96
+MIN_DAYS_FOR_PERIOD = 6
+
+
+@JobManager.register
+class DiscoverGenerator(BaseJob):
+    interval = timedelta(hours=3)
+
+    def get_popular_marked_item_ids(self, category, days, exisiting_ids):
+        item_ids = [
+            m["item_id"]
+            for m in ShelfMember.objects.filter(q_item_in_category(category))
+            .filter(created_time__gt=timezone.now() - timedelta(days=days))
+            .exclude(item_id__in=exisiting_ids)
+            .values("item_id")
+            .annotate(num=Count("item_id"))
+            .filter(num__gte=MIN_MARKS)
+            .order_by("-num")[:MAX_ITEMS_PER_PERIOD]
+        ]
+        return item_ids
+
+    def get_popular_commented_podcast_ids(self, days, exisiting_ids):
+        return list(
+            Comment.objects.filter(q_item_in_category(ItemCategory.Podcast))
+            .filter(created_time__gt=timezone.now() - timedelta(days=days))
+            .annotate(p=F("item__podcastepisode__program"))
+            .filter(p__isnull=False)
+            .exclude(p__in=exisiting_ids)
+            .values("p")
+            .annotate(num=Count("p"))
+            .filter(num__gte=MIN_MARKS)
+            .order_by("-num")
+            .values_list("p", flat=True)[:MAX_ITEMS_PER_PERIOD]
+        )
+
+    def cleanup_shows(self, items):
+        seasons = [i for i in items if i.__class__ == TVSeason]
+        for season in seasons:
+            if season.show in items:
+                items.remove(season.show)
+        return items
+
+    def run(self):
+        logger.info("Discover data update start.")
+        cache_key = "public_gallery"
+        gallery_categories = [
+            ItemCategory.Book,
+            ItemCategory.Movie,
+            ItemCategory.TV,
+            ItemCategory.Game,
+            ItemCategory.Music,
+            ItemCategory.Podcast,
+        ]
+        gallery_list = []
+        for category in gallery_categories:
+            days = MAX_DAYS_FOR_PERIOD
+            item_ids = []
+            while days >= MIN_DAYS_FOR_PERIOD:
+                ids = self.get_popular_marked_item_ids(category, days, item_ids)
+                logger.info(f"Most marked {category} in last {days} days: {len(ids)}")
+                item_ids = ids + item_ids
+                days //= 2
+            if category == ItemCategory.Podcast:
+                days = MAX_DAYS_FOR_PERIOD // 4
+                extra_ids = self.get_popular_commented_podcast_ids(days, item_ids)
+                logger.info(
+                    f"Most commented podcast in last {days} days: {len(extra_ids)}"
+                )
+                item_ids = extra_ids + item_ids
+            items = [Item.objects.get(pk=i) for i in item_ids]
+            if category == ItemCategory.TV:
+                items = self.cleanup_shows(items)
+            gallery_list.append(
+                {
+                    "name": "popular_" + category.value,
+                    "title": ""
+                    + (category.label if category != ItemCategory.Book else "图书"),
+                    "items": items,
+                }
+            )
+        cache.set(cache_key, gallery_list, timeout=None)
+        logger.info("Discover data updated.")
diff --git a/catalog/jobs/podcast.py b/catalog/jobs/podcast.py
new file mode 100644
index 00000000..3b464854
--- /dev/null
+++ b/catalog/jobs/podcast.py
@@ -0,0 +1,35 @@
+import pprint
+from datetime import timedelta
+from time import sleep
+
+from loguru import logger
+
+from catalog.common.models import IdType
+from catalog.models import Podcast
+from catalog.sites import RSS
+from common.models import BaseJob, JobManager
+
+
+@JobManager.register
+class PodcastUpdater(BaseJob):
+    interval = timedelta(hours=2)
+
+    def run(self):
+        logger.info("Podcasts update start.")
+        count = 0
+        qs = Podcast.objects.filter(
+            is_deleted=False, merged_to_item__isnull=True
+        ).order_by("pk")
+        for p in qs:
+            if (
+                p.primary_lookup_id_type == IdType.RSS
+                and p.primary_lookup_id_value is not None
+            ):
+                logger.info(f"updating {p}")
+                c = p.episodes.count()
+                site = RSS(p.feed_url)
+                site.scrape_additional_data()
+                c2 = p.episodes.count()
+                logger.info(f"updated {p}, {c2-c} new episodes.")
+                count += c2 - c
+        logger.info(f"Podcasts update finished, {count} new episodes total.")
diff --git a/catalog/management/commands/cat.py b/catalog/management/commands/cat.py
index 6fddbc65..f2c13cd1 100644
--- a/catalog/management/commands/cat.py
+++ b/catalog/management/commands/cat.py
@@ -31,10 +31,17 @@ def handle(self, *args, **options):
         self.stdout.write(f"Fetching from {site}")
         if options["save"]:
             resource = site.get_resource_ready(ignore_existing_content=options["force"])
-            pprint.pp(resource.metadata)
-            pprint.pp(site.get_item())
-            pprint.pp(site.get_item().cover)
-            pprint.pp(site.get_item().metadata)
+            if resource:
+                pprint.pp(resource.metadata)
+            else:
+                self.stdout.write(self.style.ERROR(f"Unable to get resource for {url}"))
+            item = site.get_item()
+            if item:
+                pprint.pp(item.cover)
+                pprint.pp(item.metadata)
+                pprint.pp(item.absolute_url)
+            else:
+                self.stdout.write(self.style.ERROR(f"Unable to get item for {url}"))
         else:
             resource = site.scrape()
             pprint.pp(resource.metadata)
diff --git a/catalog/management/commands/crawl.py b/catalog/management/commands/crawl.py
index cacc368f..241fb0a8 100644
--- a/catalog/management/commands/crawl.py
+++ b/catalog/management/commands/crawl.py
@@ -29,16 +29,19 @@ def handle(self, *args, **options):
             logger.info(f"Navigating {url}")
             content = ProxiedDownloader(url).download().html()
             urls = content.xpath("//a/@href")
-            for _u in urls:
+            for _u in urls:  # type:ignore
                 u = urljoin(url, _u)
                 if u not in history and u not in queue:
                     if len([p for p in item_patterns if re.match(p, u)]) > 0:
                         site = SiteManager.get_site_by_url(u)
-                        u = site.url
-                        if u not in history:
-                            history.append(u)
-                            logger.info(f"Fetching {u}")
-                            site.get_resource_ready()
+                        if site:
+                            u = site.url
+                            if u not in history:
+                                history.append(u)
+                                logger.info(f"Fetching {u}")
+                                site.get_resource_ready()
+                        else:
+                            logger.warning(f"unable to parse {u}")
                     elif pattern and u.find(pattern) >= 0:
                         queue.append(u)
         logger.info("Crawl finished.")
diff --git a/catalog/management/commands/discover.py b/catalog/management/commands/discover.py
index 32e316ba..d259189a 100644
--- a/catalog/management/commands/discover.py
+++ b/catalog/management/commands/discover.py
@@ -7,7 +7,7 @@
 from loguru import logger
 
 from catalog.models import *
-from journal.models import Comment, ShelfMember, query_item_category
+from journal.models import Comment, ShelfMember, q_item_in_category
 
 MAX_ITEMS_PER_PERIOD = 12
 MIN_MARKS = 2
@@ -28,7 +28,7 @@ def add_arguments(self, parser):
     def get_popular_marked_item_ids(self, category, days, exisiting_ids):
         item_ids = [
             m["item_id"]
-            for m in ShelfMember.objects.filter(query_item_category(category))
+            for m in ShelfMember.objects.filter(q_item_in_category(category))
             .filter(created_time__gt=timezone.now() - timedelta(days=days))
             .exclude(item_id__in=exisiting_ids)
             .values("item_id")
@@ -40,7 +40,7 @@ def get_popular_marked_item_ids(self, category, days, exisiting_ids):
     def get_popular_commented_podcast_ids(self, days, exisiting_ids):
         return list(
-            Comment.objects.filter(query_item_category(ItemCategory.Podcast))
+            Comment.objects.filter(q_item_in_category(ItemCategory.Podcast))
             .filter(created_time__gt=timezone.now() - timedelta(days=days))
             .annotate(p=F("item__podcastepisode__program"))
             .filter(p__isnull=False)
diff --git a/catalog/management/commands/index.py b/catalog/management/commands/index.py
index d7916761..1a07cb59 100644
--- a/catalog/management/commands/index.py
+++ b/catalog/management/commands/index.py
@@ -1,6 +1,7 @@
 import pprint
 from datetime import timedelta
 from time import sleep
+from typing import TYPE_CHECKING
 
 from django.conf import settings
 from django.core.management.base import BaseCommand
@@ -8,7 +9,8 @@
 from django.utils import timezone
 from tqdm import tqdm
 
-from catalog.models import *
+from catalog.models import Item
+from catalog.search.typesense import Indexer
 
 BATCH_SIZE = 1000
diff --git a/catalog/migrations/0011_alter_externalresource_id_type_and_more.py b/catalog/migrations/0011_alter_externalresource_id_type_and_more.py
new file mode 100644
index 00000000..3659a6d5
--- /dev/null
+++ b/catalog/migrations/0011_alter_externalresource_id_type_and_more.py
@@ -0,0 +1,117 @@
+# Generated by Django 4.2.3 on 2023-08-06 02:01
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("catalog", "0011_remove_item_last_editor"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="externalresource",
+            name="id_type",
+            field=models.CharField(
+                choices=[
+                    ("wikidata", "维基数据"),
+                    ("isbn10", "ISBN10"),
+                    ("isbn", "ISBN"),
+                    ("asin", "ASIN"),
+                    ("issn", "ISSN"),
+                    ("cubn", "统一书号"),
+                    ("isrc", "ISRC"),
+                    ("gtin", "GTIN UPC EAN码"),
+                    ("rss", "RSS Feed URL"),
+                    ("imdb", "IMDb"),
+                    ("tmdb_tv", "TMDB剧集"),
+                    ("tmdb_tvseason", "TMDB剧集"),
+                    ("tmdb_tvepisode", "TMDB剧集"),
+                    ("tmdb_movie", "TMDB电影"),
+                    ("goodreads", "Goodreads"),
+                    ("goodreads_work", "Goodreads著作"),
+                    ("googlebooks", "谷歌图书"),
+                    ("doubanbook", "豆瓣读书"),
+                    ("doubanbook_work", "豆瓣读书著作"),
+                    ("doubanmovie", "豆瓣电影"),
+                    ("doubanmusic", "豆瓣音乐"),
+                    ("doubangame", "豆瓣游戏"),
+                    ("doubandrama", "豆瓣舞台剧"),
+                    ("doubandrama_version", "豆瓣舞台剧版本"),
+                    ("bookstw", "博客来图书"),
+                    ("bandcamp", "Bandcamp"),
+                    ("spotify_album", "Spotify专辑"),
+                    ("spotify_show", "Spotify播客"),
+                    ("discogs_release", "Discogs Release"),
+                    ("discogs_master", "Discogs Master"),
+                    ("musicbrainz", "MusicBrainz ID"),
+                    ("doubanbook_author", "豆瓣读书作者"),
+                    ("doubanmovie_celebrity", "豆瓣电影影人"),
+                    ("goodreads_author", "Goodreads作者"),
+                    ("spotify_artist", "Spotify艺术家"),
+                    ("tmdb_person", "TMDB影人"),
+                    ("igdb", "IGDB游戏"),
+                    ("steam", "Steam游戏"),
+                    ("bangumi", "Bangumi"),
+                    ("apple_podcast", "苹果播客"),
+                    ("apple_music", "苹果音乐"),
+                    ("fedi", "联邦实例"),
+                ],
+                max_length=50,
+                verbose_name="IdType of the source site",
+            ),
+        ),
+        migrations.AlterField(
+            model_name="itemlookupid",
+            name="id_type",
+            field=models.CharField(
+                blank=True,
+                choices=[
+                    ("wikidata", "维基数据"),
+                    ("isbn10", "ISBN10"),
+                    ("isbn", "ISBN"),
+                    ("asin", "ASIN"),
+                    ("issn", "ISSN"),
+                    ("cubn", "统一书号"),
+                    ("isrc", "ISRC"),
+                    ("gtin", "GTIN UPC EAN码"),
+                    ("rss", "RSS Feed URL"),
+                    ("imdb", "IMDb"),
+                    ("tmdb_tv", "TMDB剧集"),
+                    ("tmdb_tvseason", "TMDB剧集"),
+                    ("tmdb_tvepisode", "TMDB剧集"),
+                    ("tmdb_movie", "TMDB电影"),
+                    ("goodreads", "Goodreads"),
("goodreads_work", "Goodreads著作"), + ("googlebooks", "谷歌图书"), + ("doubanbook", "豆瓣读书"), + ("doubanbook_work", "豆瓣读书著作"), + ("doubanmovie", "豆瓣电影"), + ("doubanmusic", "豆瓣音乐"), + ("doubangame", "豆瓣游戏"), + ("doubandrama", "豆瓣舞台剧"), + ("doubandrama_version", "豆瓣舞台剧版本"), + ("bookstw", "博客来图书"), + ("bandcamp", "Bandcamp"), + ("spotify_album", "Spotify专辑"), + ("spotify_show", "Spotify播客"), + ("discogs_release", "Discogs Release"), + ("discogs_master", "Discogs Master"), + ("musicbrainz", "MusicBrainz ID"), + ("doubanbook_author", "豆瓣读书作者"), + ("doubanmovie_celebrity", "豆瓣电影影人"), + ("goodreads_author", "Goodreads作者"), + ("spotify_artist", "Spotify艺术家"), + ("tmdb_person", "TMDB影人"), + ("igdb", "IGDB游戏"), + ("steam", "Steam游戏"), + ("bangumi", "Bangumi"), + ("apple_podcast", "苹果播客"), + ("apple_music", "苹果音乐"), + ("fedi", "联邦实例"), + ], + max_length=50, + verbose_name="源网站", + ), + ), + ] diff --git a/catalog/search/external.py b/catalog/search/external.py index 51da0806..c11a8b97 100644 --- a/catalog/search/external.py +++ b/catalog/search/external.py @@ -1,6 +1,9 @@ +import asyncio import logging +import time from urllib.parse import quote_plus +import httpx import requests from django.conf import settings from lxml import html @@ -23,7 +26,8 @@ def __init__( "all": [ { "url": source_url, - "site_name": {"label": source_site, "value": source_site}, + "site_name": source_site, + "site_label": source_site, } ] } @@ -282,12 +286,64 @@ def search(cls, q, page=1): return results +class Fediverse: + @staticmethod + async def search_task(host, q, category=None): + async with httpx.AsyncClient() as client: + results = [] + try: + response = await client.get( + f"https://{host}/api/catalog/search?query={q}&category={category or ''}", + timeout=2, + ) + r = response.json() + except: + return [] + if "data" in r: + for item in r["data"]: + url = f"https://{host}{item['url']}" # FIXME update API and use abs urls + try: + cat = ItemCategory(item["category"]) + except: + cat = "" + results.append( + SearchResultItem( + cat, + host, + url, + item["display_title"], + "", + item["brief"], + item["cover_image_url"], + ) + ) + return results + + @classmethod + def search(cls, q, page=1, category=None): + from takahe.utils import Takahe + + peers = Takahe.get_neodb_peers() + # peers = ["neodb.social", "green.eggplant.place"] + tasks = [Fediverse.search_task(host, q, category) for host in peers] + # loop = asyncio.get_event_loop() + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + results = [] + for r in loop.run_until_complete(asyncio.gather(*tasks)): + results.extend(r) + return results + + class ExternalSources: @classmethod def search(cls, c, q, page=1): if not q: return [] results = [] + results.extend( + Fediverse.search(q, page, category=c if c and c != "all" else None) + ) if c == "" or c is None: c = "all" if c == "all" or c == "movietv": diff --git a/catalog/search/models.py b/catalog/search/models.py index 3607ea4f..9f5def49 100644 --- a/catalog/search/models.py +++ b/catalog/search/models.py @@ -19,6 +19,10 @@ class DbIndexer: + @classmethod + def init(cls): + pass + @classmethod def search(cls, q, page=1, categories=None, tag=None, sort=None): result = lambda: None diff --git a/catalog/search/typesense.py b/catalog/search/typesense.py index 61b3e32c..eb2d83f8 100644 --- a/catalog/search/typesense.py +++ b/catalog/search/typesense.py @@ -2,19 +2,20 @@ import types from datetime import timedelta from pprint import pprint +from time import sleep import django_rq import typesense from django.conf 
import settings from django.db.models.signals import post_delete, post_save from django_redis import get_redis_connection +from loguru import logger from rq.job import Job from typesense.collection import Collection from typesense.exceptions import ObjectNotFound from catalog.models import Item -INDEX_NAME = "catalog" SEARCHABLE_ATTRIBUTES = [ "title", "orig_title", @@ -52,9 +53,6 @@ SEARCH_PAGE_SIZE = 20 -logger = logging.getLogger(__name__) - - _PENDING_INDEX_KEY = "pending_index_ids" _PENDING_INDEX_QUEUE = "import" _PENDING_INDEX_JOB_ID = "pending_index_flush" @@ -125,7 +123,7 @@ class Indexer: def instance(cls) -> Collection: if cls._instance is None: cls._instance = typesense.Client(settings.TYPESENSE_CONNECTION).collections[ - INDEX_NAME + settings.TYPESENSE_INDEX_NAME ] return cls._instance # type: ignore @@ -178,17 +176,37 @@ def config(cls): {"name": ".*", "optional": True, "locale": "zh", "type": "auto"}, ] return { - "name": INDEX_NAME, + "name": settings.TYPESENSE_INDEX_NAME, "fields": fields, # "default_sorting_field": "rating_count", } @classmethod def init(cls): - idx = typesense.Client(settings.TYPESENSE_CONNECTION).collections - if idx: - # idx.delete() - idx.create(cls.config()) + try: + client = typesense.Client(settings.TYPESENSE_CONNECTION) + wait = 5 + while not client.operations.is_healthy() and wait: + logger.warning("Typesense: server not healthy") + sleep(1) + wait -= 1 + idx = client.collections[settings.TYPESENSE_INDEX_NAME] + if idx: + try: + i = idx.retrieve() + logger.debug( + f"Typesense: index {settings.TYPESENSE_INDEX_NAME} has {i['num_documents']} documents" + ) + return + except: + client.collections.create(cls.config()) + logger.info( + f"Typesense: index {settings.TYPESENSE_INDEX_NAME} created" + ) + return + logger.error("Typesense: server unknown error") + except Exception as e: + logger.error(f"Typesense: server error {e}") @classmethod def delete_index(cls): @@ -310,7 +328,7 @@ def delete_item(cls, obj): try: cls.instance().documents[pk].delete() except Exception as e: - logger.warn(f"delete item error: \n{e}") + logger.warning(f"delete item error: \n{e}") @classmethod def search(cls, q, page=1, categories=None, tag=None, sort=None): diff --git a/catalog/search/views.py b/catalog/search/views.py index d3ab3780..605fc19c 100644 --- a/catalog/search/views.py +++ b/catalog/search/views.py @@ -1,13 +1,11 @@ -import hashlib import logging -import uuid +import re import django_rq from django.conf import settings from django.contrib.auth.decorators import login_required from django.core.cache import cache from django.core.exceptions import BadRequest -from django.http import HttpResponseRedirect from django.shortcuts import redirect, render from django.utils.translation import gettext_lazy as _ from rq.job import Job @@ -15,7 +13,8 @@ from catalog.common.models import ItemCategory, SiteName from catalog.common.sites import AbstractSite, SiteManager from common.config import PAGE_LINK_NUMBER -from common.utils import PageLinksGenerator +from common.utils import HTTPResponseHXRedirect, PageLinksGenerator +from users.views import query_identity from ..models import * from .external import ExternalSources @@ -24,16 +23,7 @@ _logger = logging.getLogger(__name__) -class HTTPResponseHXRedirect(HttpResponseRedirect): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self["HX-Redirect"] = self["Location"] - - status_code = 200 - - def fetch_refresh(request, job_id): - retry = request.GET try: job = Job.fetch(id=job_id, 
connection=django_rq.get_connection("fetch")) item_url = job.return_value() @@ -102,6 +92,9 @@ def visible_categories(request): def search(request): + keywords = request.GET.get("q", default="").strip() + if re.match(r"^[@@]", keywords): + return query_identity(request, keywords.replace("@", "@")) category = request.GET.get("c", default="all").strip().lower() hide_category = False if category == "all" or not category: @@ -115,7 +108,6 @@ def search(request): hide_category = True except: categories = visible_categories(request) - keywords = request.GET.get("q", default="").strip() tag = request.GET.get("tag", default="").strip() p = request.GET.get("page", default="1") p = int(p) if p.isdigit() else 1 @@ -130,9 +122,14 @@ def search(request): ) if keywords.find("://") > 0: + host = keywords.split("://")[1].split("/")[0] + if host == settings.SITE_INFO["site_domain"]: + return redirect(keywords) site = SiteManager.get_site_by_url(keywords) if site: return fetch(request, keywords, False, site) + if request.GET.get("r"): + return redirect(keywords) items, num_pages, _, dup_items = query_index(keywords, categories, tag, p) return render( diff --git a/catalog/sites/__init__.py b/catalog/sites/__init__.py index 6fb4f868..7518ebfb 100644 --- a/catalog/sites/__init__.py +++ b/catalog/sites/__init__.py @@ -9,13 +9,14 @@ from .douban_game import DoubanGame from .douban_movie import DoubanMovie from .douban_music import DoubanMusic +from .fedi import FediverseInstance from .goodreads import Goodreads from .google_books import GoogleBooks from .igdb import IGDB from .imdb import IMDB - -# from .apple_podcast import ApplePodcast from .rss import RSS from .spotify import Spotify from .steam import Steam from .tmdb import TMDB_Movie + +# from .apple_podcast import ApplePodcast diff --git a/catalog/sites/fedi.py b/catalog/sites/fedi.py new file mode 100644 index 00000000..958b33ec --- /dev/null +++ b/catalog/sites/fedi.py @@ -0,0 +1,101 @@ +import re + +from django.core.validators import URLValidator +from loguru import logger + +from catalog.common import * +from catalog.models import * + + +@SiteManager.register +class FediverseInstance(AbstractSite): + SITE_NAME = SiteName.Fediverse + ID_TYPE = IdType.Fediverse + URL_PATTERNS = [] + WIKI_PROPERTY_ID = "" + DEFAULT_MODEL = None + id_type_mapping = { + "isbn": IdType.ISBN, + "imdb": IdType.IMDB, + "barcode": IdType.GTIN, + } + supported_types = { + "Book": Edition, + "Movie": Movie, + "TVShow": TVShow, + "TVSeason": TVSeason, + "TVEpisode": TVEpisode, + "Album": Album, + "Game": Game, + "Podcast": Podcast, + "Performance": Performance, + "PerformanceProduction": PerformanceProduction, + } + request_header = {"User-Agent": "NeoDB/0.5", "Accept": "application/activity+json"} + + @classmethod + def id_to_url(cls, id_value): + return id_value + + @classmethod + def url_to_id(cls, url: str): + u = url.split("://", 1)[1].split("/", 1) + return "https://" + u[0].lower() + "/" + u[1] + + @classmethod + def validate_url_fallback(cls, url): + val = URLValidator() + try: + val(url) + if ( + url.split("://", 1)[1].split("/", 1)[0].lower() + == settings.SITE_INFO["site_domain"] + ): + # disallow local instance URLs + return False + return cls.get_json_from_url(url) is not None + except Exception: + return False + + @classmethod + def get_json_from_url(cls, url): + j = CachedDownloader(url, headers=cls.request_header).download().json() + if j.get("type") not in cls.supported_types.keys(): + raise ValueError("Not a supported format or type") + if j.get("id") != url: + 
logger.warning(f"ID mismatch: {j.get('id')} != {url}") + return j + + def scrape(self): + data = self.get_json_from_url(self.url) + img_url = data.get("cover_image_url") + raw_img, img_ext = ( + BasicImageDownloader.download_image(img_url, None, headers={}) + if img_url + else (None, None) + ) + ids = {} + data["preferred_model"] = data.get("type") + data["prematched_resources"] = [] + for ext in data.get("external_resources", []): + site = SiteManager.get_site_by_url(ext.get("url")) + if site and site.ID_TYPE != self.ID_TYPE: + ids[site.ID_TYPE] = site.id_value + data["prematched_resources"].append( + { + "model": data["preferred_model"], + "id_type": site.ID_TYPE, + "id_value": site.id_value, + "url": site.url, + } + ) + # for k, v in self.id_type_mapping.items(): + # if data.get(k): + # ids[v] = data.get(k) + d = ResourceContent( + metadata=data, + cover_image=raw_img, + cover_image_extention=img_ext, + lookup_ids=ids, + ) + return d diff --git a/catalog/sites/rss.py b/catalog/sites/rss.py index 7089a511..11dba3c3 100644 --- a/catalog/sites/rss.py +++ b/catalog/sites/rss.py @@ -33,7 +33,8 @@ class RSS(AbstractSite): def parse_feed_from_url(url): if not url: return None - feed = cache.get(url) + cache_key = f"rss:{url}" + feed = cache.get(cache_key) if feed: return feed if get_mock_mode(): @@ -50,7 +51,7 @@ def parse_feed_from_url(url): feed, open(settings.DOWNLOADER_SAVEDIR + "/" + get_mock_file(url), "wb"), ) - cache.set(url, feed, timeout=300) + cache.set(cache_key, feed, timeout=settings.DOWNLOADER_CACHE_TIMEOUT) return feed @classmethod diff --git a/catalog/templates/_item_card.html b/catalog/templates/_item_card.html index ade3588d..2e24c059 100644 --- a/catalog/templates/_item_card.html +++ b/catalog/templates/_item_card.html @@ -7,7 +7,7 @@
{% if not hide_category %}[{{ item.category.label }}]{% endif %} {% for res in item.external_resources.all %} - {{ res.site_name.label }} + {{ res.site_label }} {% endfor %} diff --git a/catalog/templates/_item_card_metadata_base.html b/catalog/templates/_item_card_metadata_base.html index 3cad9768..4915fdfd 100644 --- a/catalog/templates/_item_card_metadata_base.html +++ b/catalog/templates/_item_card_metadata_base.html @@ -15,7 +15,7 @@
{% if not hide_category %}[{{ item.category.label }}]{% endif %} {% for res in item.external_resources.all %} - {{ res.site_name.label }} + {{ res.site_label }} {% endfor %} diff --git a/catalog/templates/_item_comments.html b/catalog/templates/_item_comments.html index 61fd2b3d..07dc3622 100644 --- a/catalog/templates/_item_comments.html +++ b/catalog/templates/_item_comments.html @@ -46,15 +46,11 @@ data-uuid="{{ comment.item.uuid }}"> {% endif %} - - {% liked_piece comment as liked %} - {% include 'like_stats.html' with liked=liked piece=comment %} - - - - + {% if comment.latest_post %} + {% include "action_reply_piece.html" with post=comment.latest_post piece=comment %} + {% include "action_like_post.html" with post=comment.latest_post %} + {% include "action_open_post.html" with post=comment.latest_post %} + {% endif %} {% if comment.rating_grade %}{{ comment.rating_grade|rating_star }}{% endif %} @@ -70,6 +66,7 @@ {% if comment.item != item %}{{ comment.item.title }}{% endif %}
{{ comment.html|safe }}
+ {% if comment.latest_post %}
{% endif %} {% else %} + {% if comment.shared_link %} href="{{ comment.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> diff --git a/catalog/templates/_item_reviews.html b/catalog/templates/_item_reviews.html index c18590ce..5908c93d 100644 --- a/catalog/templates/_item_reviews.html +++ b/catalog/templates/_item_reviews.html @@ -18,7 +18,7 @@ + {% if review.shared_link %} href="{{ review.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> diff --git a/catalog/templates/_item_user_pieces.html b/catalog/templates/_item_user_pieces.html index 04994860..3e90664a 100644 --- a/catalog/templates/_item_user_pieces.html +++ b/catalog/templates/_item_user_pieces.html @@ -66,7 +66,7 @@
+ {% if mark.comment.shared_link %} href="{{ mark.comment.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {% comment %} {{ mark.comment.created_time|date }} {% endcomment %} @@ -89,7 +89,7 @@
+ {% if comment.shared_link %} href="{{ comment.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {% comment %} {{ comment.created_time|date }} {% endcomment %} @@ -127,7 +127,7 @@
+ {% if mark.review.shared_link %} href="{{ mark.review.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {{ mark.review.created_time|date }} diff --git a/catalog/templates/_sidebar_edit.html b/catalog/templates/_sidebar_edit.html index 1bd7a3f2..1a84f0bf 100644 --- a/catalog/templates/_sidebar_edit.html +++ b/catalog/templates/_sidebar_edit.html @@ -52,7 +52,7 @@
编辑选项
{% for res in item.external_resources.all %}
- {% trans '源网站' %}: {{ res.site_name.label }} + {% trans '源网站' %}: {{ res.site_label }}
{{ gallery.title }}
{% endif %} {% if request.user.is_authenticated %} - {% include "_sidebar.html" with show_progress=1 %} + {% include "_sidebar.html" with show_progress=1 identity=request.user.identity %} {% else %} {% include "_sidebar_anonymous.html" %} {% endif %} diff --git a/catalog/templates/item_base.html b/catalog/templates/item_base.html index bc3a2afe..81a83d11 100644 --- a/catalog/templates/item_base.html +++ b/catalog/templates/item_base.html @@ -43,7 +43,7 @@

{% for res in item.external_resources.all %} - {{ res.site_name.label }} + {{ res.site_label }} {% endfor %} diff --git a/catalog/templates/item_mark_list.html b/catalog/templates/item_mark_list.html index e4da00f3..ed0c3505 100644 --- a/catalog/templates/item_mark_list.html +++ b/catalog/templates/item_mark_list.html @@ -43,7 +43,7 @@
+ {% if mark.shared_link %} href="{{ mark.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {{ mark.created_time|date }} diff --git a/catalog/templates/item_review_list.html b/catalog/templates/item_review_list.html index aa4d3c2b..90e3339e 100644 --- a/catalog/templates/item_review_list.html +++ b/catalog/templates/item_review_list.html @@ -31,7 +31,7 @@
+ {% if review.shared_link %} href="{{ review.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {% liked_piece review as liked %} diff --git a/catalog/templates/search_results.html b/catalog/templates/search_results.html index 2648ed3f..baa0ea4d 100644 --- a/catalog/templates/search_results.html +++ b/catalog/templates/search_results.html @@ -93,7 +93,7 @@
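
The `shared_link` anchor pattern above now repeats across several templates (`_item_comments`, `_item_reviews`, `_item_user_pieces`, `item_mark_list`, `item_review_list`, and `user_collection_list` further down). If it keeps spreading, a small inclusion tag could fold it into one partial; a sketch, with the tag and partial names invented for illustration:

from django import template

register = template.Library()


@register.inclusion_tag("_fedi_link.html")  # hypothetical partial holding the anchor markup
def fedi_link(piece):
    # enabled link when the piece has a fediverse share URL, disabled otherwise
    return {"shared_link": getattr(piece, "shared_link", None)}
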
{% trans '含有标签' %} “{{ request.GET.tag }}” {% trans '的结果' %} {% empty %}

无站内条目匹配。 - {% if user.is_authenticated %}系统会尝试搜索其它网站的条目,点击标题可添加到本站。{% endif %} + {% if request.user.is_authenticated %}系统会尝试搜索其它网站的条目,点击标题可添加到本站。{% endif %}

如果你在 @@ -116,7 +116,7 @@

{% trans '含有标签' %} “{{ request.GET.tag }}” {% trans '的结果' %} {% endif %}
- {% if request.GET.q and user.is_authenticated %} + {% if request.GET.q and request.user.is_authenticated %}

diff --git a/catalog/tv/models.py b/catalog/tv/models.py index 467234d5..525f7e27 100644 --- a/catalog/tv/models.py +++ b/catalog/tv/models.py @@ -437,7 +437,7 @@ class TVEpisode(Item): @property def display_title(self): - return f"{self.season.display_title} 第{self.episode_number}集" # TODO i18n + return f"{self.season.display_title if self.season else ''} 第{self.episode_number}集" # TODO i18n @property def parent_item(self): diff --git a/catalog/urls.py b/catalog/urls.py index 94125c23..3e41ccf6 100644 --- a/catalog/urls.py +++ b/catalog/urls.py @@ -129,8 +129,9 @@ def _get_all_url_paths(): mark_list, name="mark_list", ), - path("search/", search, name="search"), - path("search/external/", external_search, name="external_search"), + path("search", search, name="search"), + path("search/", search, name="search_legacy"), + path("search/external", external_search, name="external_search"), path("fetch_refresh/", fetch_refresh, name="fetch_refresh"), path("refetch", refetch, name="refetch"), path("unlink", unlink, name="unlink"), diff --git a/catalog/views.py b/catalog/views.py index 11ac0b3d..480d86fc 100644 --- a/catalog/views.py +++ b/catalog/views.py @@ -7,8 +7,10 @@ from django.db.models import Count from django.http import Http404 from django.shortcuts import get_object_or_404, redirect, render +from django.urls import reverse from django.utils.translation import gettext_lazy as _ from django.views.decorators.clickjacking import xframe_options_exempt +from django.views.decorators.http import require_http_methods from common.config import PAGE_LINK_NUMBER from common.utils import PageLinksGenerator, get_uuid_or_404 @@ -19,9 +21,8 @@ ShelfMember, ShelfType, ShelfTypeNames, - query_following, - query_item_category, - query_visible, + q_piece_in_home_feed_of_user, + q_piece_visible_to_user, ) from .forms import * @@ -74,6 +75,8 @@ def retrieve(request, item_path, item_uuid): item_url = f"/{item_path}/{item_uuid}" if item.url != item_url: return redirect(item.url) + if request.headers.get("Accept", "").endswith("json"): + return redirect(item.api_url) skipcheck = request.GET.get("skipcheck", False) and request.user.is_authenticated if not skipcheck and item.merged_to_item: return redirect(item.merged_to_item.url) @@ -91,16 +94,16 @@ def retrieve(request, item_path, item_uuid): child_item_comments = [] shelf_types = [(n[1], n[2]) for n in iter(ShelfTypeNames) if n[0] == item.category] if request.user.is_authenticated: - visible = query_visible(request.user) - mark = Mark(request.user, item) + visible = q_piece_visible_to_user(request.user) + mark = Mark(request.user.identity, item) child_item_comments = Comment.objects.filter( - owner=request.user, item__in=item.child_items.all() + owner=request.user.identity, item__in=item.child_items.all() ) review = mark.review - my_collections = item.collections.all().filter(owner=request.user) + my_collections = item.collections.all().filter(owner=request.user.identity) collection_list = ( item.collections.all() - .exclude(owner=request.user) + .exclude(owner=request.user.identity) .filter(visible) .annotate(like_counts=Count("likes")) .order_by("-like_counts") @@ -145,9 +148,9 @@ def mark_list(request, item_path, item_uuid, following_only=False): raise Http404() queryset = ShelfMember.objects.filter(item=item).order_by("-created_time") if following_only: - queryset = queryset.filter(query_following(request.user)) + queryset = queryset.filter(q_piece_in_home_feed_of_user(request.user)) else: - queryset = queryset.filter(query_visible(request.user)) + 
queryset = queryset.filter(q_piece_visible_to_user(request.user)) paginator = Paginator(queryset, NUM_REVIEWS_ON_LIST_PAGE) page_number = request.GET.get("page", default=1) marks = paginator.get_page(page_number) @@ -169,7 +172,7 @@ def review_list(request, item_path, item_uuid): if not item: raise Http404() queryset = Review.objects.filter(item=item).order_by("-created_time") - queryset = queryset.filter(query_visible(request.user)) + queryset = queryset.filter(q_piece_visible_to_user(request.user)) paginator = Paginator(queryset, NUM_REVIEWS_ON_LIST_PAGE) page_number = request.GET.get("page", default=1) reviews = paginator.get_page(page_number) @@ -192,7 +195,7 @@ def comments(request, item_path, item_uuid): raise Http404() ids = item.child_item_ids + [item.id] queryset = Comment.objects.filter(item_id__in=ids).order_by("-created_time") - queryset = queryset.filter(query_visible(request.user)) + queryset = queryset.filter(q_piece_visible_to_user(request.user)) before_time = request.GET.get("last") if before_time: queryset = queryset.filter(created_time__lte=before_time) @@ -218,7 +221,7 @@ def comments_by_episode(request, item_path, item_uuid): else: ids = item.child_item_ids queryset = Comment.objects.filter(item_id__in=ids).order_by("-created_time") - queryset = queryset.filter(query_visible(request.user)) + queryset = queryset.filter(q_piece_visible_to_user(request.user)) before_time = request.GET.get("last") if before_time: queryset = queryset.filter(created_time__lte=before_time) @@ -240,7 +243,7 @@ def reviews(request, item_path, item_uuid): raise Http404() ids = item.child_item_ids + [item.id] queryset = Review.objects.filter(item_id__in=ids).order_by("-created_time") - queryset = queryset.filter(query_visible(request.user)) + queryset = queryset.filter(q_piece_visible_to_user(request.user)) before_time = request.GET.get("last") if before_time: queryset = queryset.filter(created_time__lte=before_time) @@ -254,15 +257,10 @@ def reviews(request, item_path, item_uuid): ) +@require_http_methods(["GET"]) def discover(request): if request.method != "GET": raise BadRequest() - user = request.user - if user.is_authenticated: - layout = user.preference.discover_layout - else: - layout = [] - cache_key = "public_gallery" gallery_list = cache.get(cache_key, []) @@ -274,10 +272,14 @@ def discover(request): # ) # gallery["items"] = Item.objects.filter(id__in=ids) - if user.is_authenticated: + if request.user.is_authenticated: + if not request.user.registration_complete: + return redirect(reverse("users:register")) + layout = request.user.preference.discover_layout + identity = request.user.identity podcast_ids = [ p.item_id - for p in user.shelf_manager.get_latest_members( + for p in identity.shelf_manager.get_latest_members( ShelfType.PROGRESS, ItemCategory.Podcast ) ] @@ -287,7 +289,7 @@ def discover(request): books_in_progress = Edition.objects.filter( id__in=[ p.item_id - for p in user.shelf_manager.get_latest_members( + for p in identity.shelf_manager.get_latest_members( ShelfType.PROGRESS, ItemCategory.Book )[:10] ] @@ -295,21 +297,23 @@ def discover(request): tvshows_in_progress = Item.objects.filter( id__in=[ p.item_id - for p in user.shelf_manager.get_latest_members( + for p in identity.shelf_manager.get_latest_members( ShelfType.PROGRESS, ItemCategory.TV )[:10] ] ) else: + identity = None recent_podcast_episodes = [] books_in_progress = [] tvshows_in_progress = [] + layout = [] return render( request, "discover.html", { - "user": user, + "identity": identity, "gallery_list": 
gallery_list, "recent_podcast_episodes": recent_podcast_episodes, "books_in_progress": books_in_progress, diff --git a/common/apps.py b/common/apps.py index 3ce38941..5756b824 100644 --- a/common/apps.py +++ b/common/apps.py @@ -1,5 +1,14 @@ from django.apps import AppConfig +from django.db.models.signals import post_migrate class CommonConfig(AppConfig): name = "common" + + def ready(self): + post_migrate.connect(self.setup, sender=self) + + def setup(self, **kwargs): + from .setup import Setup + + Setup().run() diff --git a/common/management/commands/cron.py b/common/management/commands/cron.py new file mode 100644 index 00000000..a4dd9e4e --- /dev/null +++ b/common/management/commands/cron.py @@ -0,0 +1,42 @@ +from django.core.management.base import BaseCommand +from loguru import logger + +from catalog.jobs import * # noqa +from common.models import JobManager + + +class Command(BaseCommand): + help = "Schedule timed jobs" + + def add_arguments(self, parser): + parser.add_argument( + "--cancel", + action="store_true", + ) + parser.add_argument( + "--schedule", + action="store_true", + ) + parser.add_argument( + "--list", + action="store_true", + ) + parser.add_argument( + "--runonce", + action="append", + ) + + def handle(self, *args, **options): + if options["cancel"]: + JobManager.cancel() + if options["schedule"]: + JobManager.cancel() # cancel previously scheduled jobs if any + JobManager.schedule() + if options["runonce"]: + for job_id in options["runonce"]: + run = JobManager.run(job_id) + if not run: + logger.error(f"Job not found: {job_id}") + if options["list"]: + jobs = JobManager.get_scheduled_job_ids() + logger.info(f"{len(jobs)} scheduled jobs: {jobs}") diff --git a/common/management/commands/delete_job.py b/common/management/commands/delete_job.py deleted file mode 100644 index 66f2690e..00000000 --- a/common/management/commands/delete_job.py +++ /dev/null @@ -1,20 +0,0 @@ -import pprint - -from django.core.management.base import BaseCommand -from redis import Redis -from rq import Queue -from rq.job import Job - - -class Command(BaseCommand): - help = "Delete a job" - - def add_arguments(self, parser): - parser.add_argument("job_id", type=str, help="Job ID") - - def handle(self, *args, **options): - redis = Redis() - job_id = str(options["job_id"]) - job = Job.fetch(job_id, connection=redis) - job.delete() - self.stdout.write(self.style.SUCCESS(f"Deleted {job}")) diff --git a/common/management/commands/jobs.py b/common/management/commands/jobs.py new file mode 100644 index 00000000..011c4129 --- /dev/null +++ b/common/management/commands/jobs.py @@ -0,0 +1,45 @@ +import pprint + +import django_rq +from django.conf import settings +from django.core.management.base import BaseCommand +from redis import Redis +from rq import Queue +from rq.job import Job + + +class Command(BaseCommand): + help = "Show jobs in queue" + + def add_arguments(self, parser): + parser.add_argument("--delete", action="append") + parser.add_argument("--list", action="store_true") + + def handle(self, *args, **options): + if options["delete"]: + for job_id in options["delete"]: + job = Job.fetch(job_id, connection=django_rq.get_connection("fetch")) + job.delete() + self.stdout.write(self.style.SUCCESS(f"Deleted {job}")) + if options["list"]: + queues = settings.RQ_QUEUES.keys() + for q in queues: + queue = django_rq.get_queue(q) + for registry in [ + queue.scheduled_job_registry, + queue.started_job_registry, + queue.deferred_job_registry, + queue.finished_job_registry, + queue.failed_job_registry, + 
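
The new `cron` management command above replaces the ad-hoc `delete_job`/`list_jobs` commands with a single entry point for scheduling, cancelling, listing, and one-off runs. A typical deploy-time invocation, the Python equivalent of `python manage.py cron --schedule`:

from django.core.management import call_command

# (re)registers every job in JobManager.registry; the command cancels any
# previously scheduled jobs first, so stale schedules are cleared
call_command("cron", schedule=True)
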
queue.canceled_job_registry, + ]: + for job_id in registry.get_job_ids(): + try: + job = Job.fetch( + job_id, connection=django_rq.get_connection(q) + ) + self.stdout.write( + self.style.SUCCESS(f"{registry.key} {repr(job)}") + ) + except Exception as e: + print(f"Error fetching {registry.key} {job_id}") diff --git a/common/management/commands/list_jobs.py b/common/management/commands/list_jobs.py deleted file mode 100644 index 51f189c5..00000000 --- a/common/management/commands/list_jobs.py +++ /dev/null @@ -1,31 +0,0 @@ -import pprint - -from django.core.management.base import BaseCommand -from redis import Redis -from rq import Queue -from rq.job import Job - - -class Command(BaseCommand): - help = "Show jobs in queue" - - def add_arguments(self, parser): - parser.add_argument("queue", type=str, help="Queue") - - def handle(self, *args, **options): - redis = Redis() - queue = Queue(str(options["queue"]), connection=redis) - for registry in [ - queue.started_job_registry, - queue.deferred_job_registry, - queue.finished_job_registry, - queue.failed_job_registry, - queue.scheduled_job_registry, - ]: - self.stdout.write(self.style.SUCCESS(f"Registry {registry}")) - for job_id in registry.get_job_ids(): - try: - job = Job.fetch(job_id, connection=redis) - pprint.pp(job) - except Exception as e: - print(f"Error fetching {job_id}") diff --git a/common/management/commands/setup.py b/common/management/commands/setup.py new file mode 100644 index 00000000..6f3a23fc --- /dev/null +++ b/common/management/commands/setup.py @@ -0,0 +1,94 @@ +from django.conf import settings +from django.core.management.base import BaseCommand +from loguru import logger + +from catalog.search.typesense import Indexer +from takahe.models import Config as TakaheConfig +from takahe.models import Domain as TakaheDomain +from takahe.models import Identity as TakaheIdentity +from takahe.models import User as TakaheUser +from users.models import User + + +class Command(BaseCommand): + help = "Post-Migration Setup" + + def create_site(self, domain, service_domain): + TakaheDomain.objects.create( + domain=domain, + local=True, + service_domain=service_domain, + notes="NeoDB", + nodeinfo={}, + ) + TakaheConfig.objects.update_or_create( + key="public_timeline", + user=None, + identity=None, + domain=None, + defaults={"json": False}, + ) + + def sync_site_config(self): + domain = settings.SITE_INFO["site_domain"] + if not domain: + raise ValueError("Panic: site_domain is not set!") + icon = settings.SITE_INFO["site_logo"] + name = settings.SITE_INFO["site_name"] + service_domain = settings.SITE_INFO.get("site_service_domain") + if not TakaheDomain.objects.filter(domain=domain).exists(): + logger.warning(f"Domain {domain} not found, creating...") + self.create_site(domain, service_domain) + TakaheConfig.objects.update_or_create( + key="site_name", + user=None, + identity=None, + domain=None, + defaults={"json": name}, + ) + TakaheConfig.objects.update_or_create( + key="site_name", + user=None, + identity=None, + domain_id=domain, + defaults={"json": name}, + ) + TakaheConfig.objects.update_or_create( + key="site_icon", + user=None, + identity=None, + domain_id=None, + defaults={"json": icon}, + ) + TakaheConfig.objects.update_or_create( + key="site_icon", + user=None, + identity=None, + domain_id=domain, + defaults={"json": icon}, + ) + + def sync_admin_user(self): + users = User.objects.filter(username__in=settings.SETUP_ADMIN_USERNAMES) + for user in users: + if user.is_superuser: + logger.debug(f"User {user.username} is already 
admin") + else: + user.is_superuser = True + user.save(update_fields=["is_superuser"]) + TakaheUser.objects.filter(email=f"@{user.username}").update(admin=True) + logger.info(f"Updated user {user.username} as admin") + + def handle(self, *args, **options): + # Update site name if changed + self.sync_site_config() + + # Create/update admin user if configured in env + self.sync_admin_user() + + # Create basic emoji if not exists + + # Create search index if not exists + Indexer.init() + + # Register cron jobs if not yet diff --git a/common/models.py b/common/models.py index e69de29b..f76a1dee 100644 --- a/common/models.py +++ b/common/models.py @@ -0,0 +1,72 @@ +from datetime import timedelta + +import django_rq +from loguru import logger +from rq.job import Job +from rq.registry import ScheduledJobRegistry + + +class BaseJob: + interval = timedelta(seconds=1) + + @classmethod + def cancel(cls): + job_id = cls.__name__ + try: + job = Job.fetch(id=job_id, connection=django_rq.get_connection("cron")) + if job.get_status() in ["queued", "scheduled"]: + logger.info(f"Cancel queued job: {job_id}") + job.cancel() + registry = ScheduledJobRegistry(queue=django_rq.get_queue("cron")) + registry.remove(job) + except: + pass + + @classmethod + def schedule(cls): + job_id = cls.__name__ + logger.info(f"Scheduling job: {job_id}") + django_rq.get_queue("cron").enqueue_in( + cls.interval, cls._run, job_id=job_id, result_ttl=0, failure_ttl=0 + ) + + @classmethod + def _run(cls): + cls.schedule() # schedule next run + cls().run() + + def run(self): + pass + + +class JobManager: + registry = set() + + @classmethod + def register(cls, target): + cls.registry.add(target) + return target + + @classmethod + def schedule(cls): + for j in cls.registry: + j.schedule() + + @classmethod + def cancel(cls): + for j in cls.registry: + j.cancel() + + @classmethod + def run(cls, job_id): + for j in cls.registry: + if j.__name__ == job_id: + logger.info(f"Run job: {job_id}") + j().run() + return True + return False + + @classmethod + def get_scheduled_job_ids(cls): + registry = ScheduledJobRegistry(queue=django_rq.get_queue("cron")) + return registry.get_job_ids() diff --git a/common/setup.py b/common/setup.py new file mode 100644 index 00000000..7fae31f6 --- /dev/null +++ b/common/setup.py @@ -0,0 +1,155 @@ +from django.conf import settings +from loguru import logger + +from catalog.search.models import Indexer +from takahe.models import Config as TakaheConfig +from takahe.models import Domain as TakaheDomain +from takahe.models import Follow as TakaheFollow +from takahe.models import Identity as TakaheIdentity +from takahe.models import User as TakaheUser +from takahe.utils import Takahe +from users.models import User + + +class Setup: + """ + Post-Migration Setup + """ + + def create_site(self, domain, service_domain): + TakaheDomain.objects.create( + domain=domain, + local=True, + service_domain=service_domain, + notes="NeoDB", + nodeinfo={}, + ) + TakaheConfig.objects.update_or_create( + key="public_timeline", + user=None, + identity=None, + domain=None, + defaults={"json": False}, + ) + + def sync_site_config(self): + domain = settings.SITE_INFO["site_domain"] + if not domain: + raise ValueError("Panic: site_domain is not set!") + icon = settings.SITE_INFO["site_logo"] + name = settings.SITE_INFO["site_name"] + service_domain = settings.SITE_INFO.get("site_service_domain") + + if not TakaheDomain.objects.filter(domain=domain).exists(): + logger.info(f"Domain {domain} not found, creating...") + self.create_site(domain, 
service_domain) + if ( + TakaheIdentity.objects.filter(local=True) + .exclude(domain_id__isnull=True) + .exists() + ): + logger.warning( + f"Local identities are found for other domains, there might be a configuration issue." + ) + + TakaheConfig.objects.update_or_create( + key="site_name", + user=None, + identity=None, + domain=None, + defaults={"json": name}, + ) + TakaheConfig.objects.update_or_create( + key="site_name", + user=None, + identity=None, + domain_id=domain, + defaults={"json": name}, + ) + TakaheConfig.objects.update_or_create( + key="site_icon", + user=None, + identity=None, + domain_id=None, + defaults={"json": icon}, + ) + TakaheConfig.objects.update_or_create( + key="site_icon", + user=None, + identity=None, + domain_id=domain, + defaults={"json": icon}, + ) + + def sync_admin_user(self): + users = User.objects.filter(username__in=settings.SETUP_ADMIN_USERNAMES) + for user in users: + if user.is_superuser: + logger.debug(f"User {user.username} is already admin") + else: + user.is_superuser = True + user.save(update_fields=["is_superuser"]) + TakaheUser.objects.filter(email=f"@{user.username}").update(admin=True) + logger.info(f"Updated user {user.username} as admin") + + def sync_relay(self): + relay_follow = TakaheFollow.objects.filter( + source__username="__relay__", + source__local=True, + target__actor_uri=settings.DEFAULT_RELAY_SERVER, + ).first() + if settings.DISABLE_DEFAULT_RELAY: + if relay_follow: + logger.info("Default relay is disabled, unsubscribing...") + Takahe.create_internal_message( + { + "type": "UnfollowRelay", + "actor_uri": settings.DEFAULT_RELAY_SERVER, + } + ) + else: + logger.debug(f"Default relay is disabled.") + else: + if relay_follow: + logger.debug( + f"Default relay is enabled and subscribed, state: {relay_follow.state}" + ) + else: + logger.info("Default relay is enabled, subscribing...") + relay_actor = TakaheIdentity.objects.filter( + username="__relay__", + local=True, + ).first() + if not relay_actor: + logger.warning( + f"Default relay is enabled but relay actor does not exist." 
+ ) + return + Takahe.create_internal_message( + { + "type": "AddFollow", + "source": relay_actor.pk, + "target_actor": settings.DEFAULT_RELAY_SERVER, + "boosts": False, + } + ) + + def run(self): + logger.info("Running post-migration setup...") + # Update site name if changed + self.sync_site_config() + + # Create/update admin user if configured in env + self.sync_admin_user() + + # Subscribe to default relay if enabled + self.sync_relay() + + # Create basic emoji if not exists + + # Create search index if not exists + Indexer.init() + + # Register cron jobs if not yet + + logger.info("Finished post-migration setup.") diff --git a/common/static/scss/_post.scss b/common/static/scss/_post.scss new file mode 100644 index 00000000..ba45b7d1 --- /dev/null +++ b/common/static/scss/_post.scss @@ -0,0 +1,29 @@ +section.replies { + border-left: 1px solid var(--pico-muted-color); + margin-left: var(--pico-spacing); + padding-left: var(--pico-spacing); + margin-bottom: 0 !important; + >div { + margin-bottom: calc(var(--pico-spacing)); + } + p { + margin-bottom: 0; + } + details { + summary { + text-decoration: underline; + } + } + form { + margin-bottom: 0; + select { + width: min-content; + } + button{ + height: calc(1rem * var(--pico-line-height) + var(--pico-form-element-spacing-vertical) * 2 + var(--pico-border-width) * 2) + } + details.dropdown > summary::after { + display: none; + } + } +} diff --git a/common/static/scss/_sitelabel.scss b/common/static/scss/_sitelabel.scss index de8c0b2e..021d1493 100644 --- a/common/static/scss/_sitelabel.scss +++ b/common/static/scss/_sitelabel.scss @@ -71,6 +71,12 @@ font-weight: lighter; } + .fedi { + background: var(--pico-primary); + color: white; + font-weight: lighter; + } + .tmdb { background: linear-gradient(90deg, #91CCA3, #1FB4E2); color: white; diff --git a/common/static/scss/neodb.scss b/common/static/scss/neodb.scss index 92752b27..bde590ef 100644 --- a/common/static/scss/neodb.scss +++ b/common/static/scss/neodb.scss @@ -18,3 +18,4 @@ @import '_common.scss'; @import '_login.scss'; @import '_form.scss'; +@import '_post.scss'; diff --git a/common/templates/_field.html b/common/templates/_field.html new file mode 100644 index 00000000..79b7204a --- /dev/null +++ b/common/templates/_field.html @@ -0,0 +1,19 @@ +
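
`Setup.run()` above is hooked to the `post_migrate` signal in `common/apps.py` earlier in this diff, so every `migrate` re-syncs site config, admin flags, the relay subscription, and the search index. Because each `sync_*` step checks current state before writing, it is also safe to invoke by hand (e.g. from `manage.py shell`) after changing environment config:

from common.setup import Setup

Setup().run()  # idempotent post-migration setup
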

+ + {{ field }} + {% if field.help_text %} + + {{ field.help_text|safe|linebreaksbr }} + {% if field.field.required %}(Required){% endif %} + + {% endif %} + {{ field.errors }} + {% if field.field.widget.input_type == "file" and field.value %} + {{ field.label }} + {% endif %} +
diff --git a/common/templates/_footer.html b/common/templates/_footer.html index df554287..e6626a2d 100644 --- a/common/templates/_footer.html +++ b/common/templates/_footer.html @@ -1,24 +1,12 @@

- {% if social_link %} + {% for link in site_links %} 关注我们 - {% endif %} - {% if support_link %} - 问题反馈 - {% endif %} - {% if donation_link %} - 捐助本站 - {% endif %} + href="{{ link.url }}">{{ link.title }} + {% endfor %} 公告栏 应用开发
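
The footer above drops the three hard-coded `social_link`/`support_link`/`donation_link` slots for a loop over `site_links`, turning the footer links into data. Presumably the list is assembled from settings by a context processor; an assumed shape, for illustration only:

# hypothetical settings value feeding the site_links context variable
SITE_LINKS = [
    {"title": "关注我们", "url": "https://mastodon.example/@neodb"},
    {"title": "问题反馈", "url": "https://github.com/neodb-social/neodb/issues"},
]
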
diff --git a/journal/templates/user_collection_list.html b/journal/templates/user_collection_list.html index e8071350..cb87b02c 100644 --- a/journal/templates/user_collection_list.html +++ b/journal/templates/user_collection_list.html @@ -13,7 +13,7 @@ - {{ site_name }} - {{ user.display_name }} - + <title>{{ site_name }} - {{ identity.display_name }} - {% if liked %}关注的{% endif %} 收藏单 {% include "common_libs.html" with jquery=0 v2=1 %} @@ -23,7 +23,7 @@
- {{ user.display_name }} - + {{ identity.display_name }} - {% if liked %}关注的{% endif %} 收藏单
@@ -37,7 +37,7 @@
+ {% if collection.shared_link %} href="{{ collection.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {{ collection.created_time|date }}
diff --git a/journal/templates/user_item_list_base.html b/journal/templates/user_item_list_base.html index 9a3a56be..0ef56e8b 100644 --- a/journal/templates/user_item_list_base.html +++ b/journal/templates/user_item_list_base.html @@ -11,7 +11,7 @@ - {% block title %}{{ site_name }} - {{ user.display_name }}{% endblock %} + {% block title %}{{ site_name }} - {{ identity.display_name }}{% endblock %} {% include "common_libs.html" with jquery=0 v2=1 %} @@ -19,7 +19,7 @@
- {% block head %}{{ user.display_name }}{% endblock %} + {% block head %}{{ identity.display_name }}{% endblock %}
{% for member in members %} diff --git a/journal/templates/user_mark_list.html b/journal/templates/user_mark_list.html index e10046fa..e94a43dd 100644 --- a/journal/templates/user_mark_list.html +++ b/journal/templates/user_mark_list.html @@ -1,8 +1,8 @@ {% extends 'user_item_list_base.html' %} {% load i18n %} {% block title %} - {{ site_name }} - {{ user.display_name }} - {% trans '标记' %} + {{ site_name }} - {{ identity.display_name }} - {% trans '标记' %} {% endblock %} {% block head %} - {{ user.display_name }} - {% trans '标记' %} + {{ identity.display_name }} - {% trans '标记' %} {% endblock %} diff --git a/journal/templates/user_review_list.html b/journal/templates/user_review_list.html index 2c6a59c2..2c27de16 100644 --- a/journal/templates/user_review_list.html +++ b/journal/templates/user_review_list.html @@ -1,8 +1,8 @@ {% extends "user_item_list_base.html" %} {% load i18n %} {% block title %} - {{ site_name }} - {{ user.display_name }} - {% trans '评论' %} + {{ site_name }} - {{ identity.display_name }} - {% trans '评论' %} {% endblock %} {% block head %} - {{ user.display_name }} - {% trans '评论' %} + {{ identity.display_name }} - {% trans '评论' %} {% endblock %} diff --git a/journal/templates/user_tag_list.html b/journal/templates/user_tag_list.html index 125f09a2..f0b94452 100644 --- a/journal/templates/user_tag_list.html +++ b/journal/templates/user_tag_list.html @@ -13,7 +13,7 @@ - {{ site_name }} - {{ user.display_name }} 的标签 + {{ site_name }} - {{ identity.display_name }} 的标签 {% include "common_libs.html" with jquery=0 v2=1 %} @@ -25,7 +25,7 @@
{% trans '全部标签' %}
{% for v in tags %} - {{ v.title }} + {{ v.title }} ({{ v.total }}) diff --git a/journal/templates/user_tagmember_list.html b/journal/templates/user_tagmember_list.html index 0122238c..1afd0083 100644 --- a/journal/templates/user_tagmember_list.html +++ b/journal/templates/user_tagmember_list.html @@ -1,15 +1,15 @@ {% extends "user_item_list_base.html" %} {% load i18n %} {% block title %} - {{ site_name }} - {{ user.display_name }} - {{ tag.title }} {% trans '标签' %} + {{ site_name }} - {{ identity.display_name }} - {{ tag.title }} {% trans '标签' %} {% endblock %} {% block head %} {{ tag.title }}
{% if tag.visibility > 0 %}{% endif %} - {{ user.display_name }}的{% trans '标签' %} - {% if user == request.user %} + {{ identity.display_name }}的{% trans '标签' %} + {% if identity.user == request.user %}
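
The journal tests below now declare `databases = "__all__"`. With the takahe models living in a second database, Django's `TestCase` would otherwise block any query that leaves the `default` connection; the opt-in is one line per test class:

from django.test import TestCase


class FederatedJournalTest(TestCase):  # illustrative test class
    # allow queries against every configured database (neodb + takahe);
    # without this Django raises on access to non-default connections
    databases = "__all__"
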
no log - shelf_manager.move_item(book1, ShelfType.WISHLIST, silence=True) - self.assertEqual(log.count(), 5) - shelf_manager.move_item(book1, ShelfType.PROGRESS, silence=True) - self.assertEqual(log.count(), 5) - # test delete one log - first_log = log.first() - Mark(user, book1).delete_log(first_log.id) - self.assertEqual(log.count(), 4) - # # test delete mark -> leave one log: 移除标记 - # Mark(user, book1).delete() - # self.assertEqual(log.count(), 1) - # # test delete all logs - # shelf_manager.move_item(book1, ShelfType.PROGRESS) - # self.assertEqual(log.count(), 2) - # Mark(user, book1).delete(silence=True) - # self.assertEqual(log.count(), 0) + # test delete mark -> one more log + Mark(user.identity, book1).delete() + self.assertEqual(log.count(), 6) class TagTest(TestCase): + databases = "__all__" + def setUp(self): self.book1 = Edition.objects.create(title="Hyperion") self.book2 = Edition.objects.create(title="Andymion") - self.movie1 = Edition.objects.create(title="Hyperion, The Movie") - self.user1 = User.register(mastodon_site="site", mastodon_username="name") - self.user2 = User.register(mastodon_site="site2", mastodon_username="name2") - self.user3 = User.register(mastodon_site="site2", mastodon_username="name3") + self.movie1 = Edition.objects.create(title="Fight Club") + self.user1 = User.register(email="a@b.com", username="user") + self.user2 = User.register(email="x@b.com", username="user2") + self.user3 = User.register(email="y@b.com", username="user3") pass def test_user_tag(self): t1 = "tag 1" t2 = "tag 2" t3 = "tag 3" - TagManager.tag_item_by_user(self.book1, self.user2, [t1, t3]) + TagManager.tag_item(self.book1, self.user2.identity, [t1, t3]) self.assertEqual(self.book1.tags, [t1, t3]) - TagManager.tag_item_by_user(self.book1, self.user2, [t2, t3]) + TagManager.tag_item(self.book1, self.user2.identity, [t2, t3]) self.assertEqual(self.book1.tags, [t2, t3]) class MarkTest(TestCase): + databases = "__all__" + def setUp(self): self.book1 = Edition.objects.create(title="Hyperion") - self.user1 = User.register(mastodon_site="site", mastodon_username="name") + self.user1 = User.register(email="a@b.com", username="user") pref = self.user1.preference pref.default_visibility = 2 pref.save() def test_mark(self): - mark = Mark(self.user1, self.book1) + mark = Mark(self.user1.identity, self.book1) self.assertEqual(mark.shelf_type, None) self.assertEqual(mark.shelf_label, None) self.assertEqual(mark.comment_text, None) @@ -157,7 +156,7 @@ def test_mark(self): self.assertEqual(mark.tags, []) mark.update(ShelfType.WISHLIST, "a gentle comment", 9, 1) - mark = Mark(self.user1, self.book1) + mark = Mark(self.user1.identity, self.book1) self.assertEqual(mark.shelf_type, ShelfType.WISHLIST) self.assertEqual(mark.shelf_label, "想读的书") self.assertEqual(mark.comment_text, "a gentle comment") @@ -166,10 +165,17 @@ def test_mark(self): self.assertEqual(mark.review, None) self.assertEqual(mark.tags, []) - review = Review.review_item_by_user(self.book1, self.user1, "Critic", "Review") - mark = Mark(self.user1, self.book1) + def test_review(self): + review = Review.update_item_review( + self.book1, self.user1.identity, "Critic", "Review" + ) + mark = Mark(self.user1.identity, self.book1) self.assertEqual(mark.review, review) + Review.update_item_review(self.book1, self.user1.identity, None, None) + mark = Mark(self.user1.identity, self.book1) + self.assertIsNone(mark.review) - TagManager.tag_item_by_user(self.book1, self.user1, [" Sci-Fi ", " fic "]) - mark = Mark(self.user1, self.book1) + def 
test_tag(self): + TagManager.tag_item(self.book1, self.user1.identity, [" Sci-Fi ", " fic "]) + mark = Mark(self.user1.identity, self.book1) self.assertEqual(mark.tags, ["Sci-Fi", "fic"]) diff --git a/journal/urls.py b/journal/urls.py index a220ff15..215151c2 100644 --- a/journal/urls.py +++ b/journal/urls.py @@ -23,6 +23,11 @@ def _get_all_shelf_types(): path("unlike/", unlike, name="unlike"), path("mark/", mark, name="mark"), path("comment/", comment, name="comment"), + path("piece//replies", piece_replies, name="piece_replies"), + path("post//replies", post_replies, name="post_replies"), + path("post//reply", post_reply, name="post_reply"), + path("post//like", post_like, name="post_like"), + path("post//unlike", post_unlike, name="post_unlike"), path("mark_log//", mark_log, name="mark_log"), path( "add_to_collection/", add_to_collection, name="add_to_collection" diff --git a/journal/views/__init__.py b/journal/views/__init__.py index 759efc54..aa58787f 100644 --- a/journal/views/__init__.py +++ b/journal/views/__init__.py @@ -25,6 +25,7 @@ user_mark_list, wish, ) +from .post import piece_replies, post_like, post_replies, post_reply, post_unlike from .profile import profile, user_calendar_data from .review import ReviewFeed, review_edit, review_retrieve, user_review_list from .tag import user_tag_edit, user_tag_list, user_tag_member_list diff --git a/journal/views/collection.py b/journal/views/collection.py index 6519498e..a15558e9 100644 --- a/journal/views/collection.py +++ b/journal/views/collection.py @@ -1,28 +1,28 @@ from django.contrib.auth.decorators import login_required from django.core.exceptions import BadRequest, ObjectDoesNotExist, PermissionDenied -from django.http import Http404, HttpResponse, HttpResponseRedirect +from django.http import Http404, HttpRequest, HttpResponse, HttpResponseRedirect from django.shortcuts import get_object_or_404, redirect, render from django.urls import reverse from django.utils import timezone from django.utils.translation import gettext_lazy as _ -from catalog.models import * -from common.utils import PageLinksGenerator, get_uuid_or_404 -from journal.models.renderers import convert_leading_space_in_md +from catalog.models import Item +from common.utils import AuthedHttpRequest, get_uuid_or_404 from mastodon.api import share_collection from users.models import User +from users.models.apidentity import APIdentity from users.views import render_user_blocked, render_user_not_found from ..forms import * from ..models import * -from .common import render_relogin +from .common import render_relogin, target_identity_required @login_required -def add_to_collection(request, item_uuid): +def add_to_collection(request: AuthedHttpRequest, item_uuid): item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid)) if request.method == "GET": - collections = Collection.objects.filter(owner=request.user) + collections = Collection.objects.filter(owner=request.user.identity) return render( request, "add_to_collection.html", @@ -35,37 +35,37 @@ def add_to_collection(request, item_uuid): cid = int(request.POST.get("collection_id", default=0)) if not cid: cid = Collection.objects.create( - owner=request.user, title=f"{request.user.display_name}的收藏单" + owner=request.user.identity, title=f"{request.user.display_name}的收藏单" ).id - collection = Collection.objects.get(owner=request.user, id=cid) + collection = Collection.objects.get(owner=request.user.identity, id=cid) collection.append_item(item, note=request.POST.get("note")) - return 
HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) -def collection_retrieve(request, collection_uuid): +def collection_retrieve(request: AuthedHttpRequest, collection_uuid): collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) if not collection.is_visible_to(request.user): raise PermissionDenied() follower_count = collection.likes.all().count() following = ( - Like.user_liked_piece(request.user, collection) + Like.user_liked_piece(request.user.identity, collection) if request.user.is_authenticated else False ) featured_since = ( - collection.featured_by_user_since(request.user) + collection.featured_since(request.user.identity) if request.user.is_authenticated else None ) available_as_featured = ( request.user.is_authenticated - and (following or request.user == collection.owner) + and (following or request.user.identity == collection.owner) and not featured_since and collection.members.all().exists() ) stats = {} if featured_since: - stats = collection.get_stats_for_user(request.user) + stats = collection.get_stats(request.user.identity) stats["wishlist_deg"] = ( round(stats["wishlist"] / stats["total"] * 360) if stats["total"] else 0 ) @@ -85,38 +85,41 @@ def collection_retrieve(request, collection_uuid): "stats": stats, "available_as_featured": available_as_featured, "featured_since": featured_since, + "editable": collection.is_editable_by(request.user), }, ) @login_required -def collection_add_featured(request, collection_uuid): +def collection_add_featured(request: AuthedHttpRequest, collection_uuid): if request.method != "POST": raise BadRequest() collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) if not collection.is_visible_to(request.user): raise PermissionDenied() - FeaturedCollection.objects.update_or_create(owner=request.user, target=collection) - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + FeaturedCollection.objects.update_or_create( + owner=request.user.identity, target=collection + ) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) @login_required -def collection_remove_featured(request, collection_uuid): +def collection_remove_featured(request: AuthedHttpRequest, collection_uuid): if request.method != "POST": raise BadRequest() collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) if not collection.is_visible_to(request.user): raise PermissionDenied() fc = FeaturedCollection.objects.filter( - owner=request.user, target=collection + owner=request.user.identity, target=collection ).first() if fc: fc.delete() - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) @login_required -def collection_share(request, collection_uuid): +def collection_share(request: AuthedHttpRequest, collection_uuid): collection = ( get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) if collection_uuid @@ -130,14 +133,16 @@ def collection_share(request, collection_uuid): visibility = int(request.POST.get("visibility", default=0)) comment = request.POST.get("comment") if share_collection(collection, comment, request.user, visibility): - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) else: return render_relogin(request) else: raise BadRequest() -def collection_retrieve_items(request, collection_uuid, edit=False, msg=None): +def 
collection_retrieve_items( + request: AuthedHttpRequest, collection_uuid, edit=False, msg=None +): collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) if not collection.is_visible_to(request.user): raise PermissionDenied() @@ -155,7 +160,7 @@ def collection_retrieve_items(request, collection_uuid, edit=False, msg=None): @login_required -def collection_append_item(request, collection_uuid): +def collection_append_item(request: AuthedHttpRequest, collection_uuid): if request.method != "POST": raise BadRequest() collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) @@ -175,7 +180,7 @@ def collection_append_item(request, collection_uuid): @login_required -def collection_remove_item(request, collection_uuid, item_uuid): +def collection_remove_item(request: AuthedHttpRequest, collection_uuid, item_uuid): if request.method != "POST": raise BadRequest() collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) @@ -187,7 +192,9 @@ def collection_remove_item(request, collection_uuid, item_uuid): @login_required -def collection_move_item(request, direction, collection_uuid, item_uuid): +def collection_move_item( + request: AuthedHttpRequest, direction, collection_uuid, item_uuid +): if request.method != "POST": raise BadRequest() collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) @@ -202,7 +209,7 @@ def collection_move_item(request, direction, collection_uuid, item_uuid): @login_required -def collection_update_member_order(request, collection_uuid): +def collection_update_member_order(request: AuthedHttpRequest, collection_uuid): if request.method != "POST": raise BadRequest() collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) @@ -217,7 +224,7 @@ def collection_update_member_order(request, collection_uuid): @login_required -def collection_update_item_note(request, collection_uuid, item_uuid): +def collection_update_item_note(request: AuthedHttpRequest, collection_uuid, item_uuid): collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) if not collection.is_editable_by(request.user): raise PermissionDenied() @@ -241,7 +248,7 @@ def collection_update_item_note(request, collection_uuid, item_uuid): @login_required -def collection_edit(request, collection_uuid=None): +def collection_edit(request: AuthedHttpRequest, collection_uuid=None): collection = ( get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) if collection_uuid @@ -259,7 +266,8 @@ def collection_edit(request, collection_uuid=None): { "form": form, "collection": collection, - "user": collection.owner if collection else request.user, + "user": collection.owner.user if collection else request.user, + "identity": collection.owner if collection else request.user.identity, }, ) elif request.method == "POST": @@ -270,7 +278,7 @@ def collection_edit(request, collection_uuid=None): ) if form.is_valid(): if not collection: - form.instance.owner = request.user + form.instance.owner = request.user.identity form.instance.edited_time = timezone.now() form.save() return redirect( @@ -283,47 +291,36 @@ def collection_edit(request, collection_uuid=None): @login_required -def user_collection_list(request, user_name): - user = User.get(user_name) - if user is None: - return render_user_not_found(request) - if user != request.user and ( - request.user.is_blocked_by(user) or request.user.is_blocking(user) - ): - return render_user_blocked(request) - collections = 
Collection.objects.filter(owner=user) - if user != request.user: - if request.user.is_following(user): - collections = collections.filter(visibility__in=[0, 1]) - else: - collections = collections.filter(visibility=0) +@target_identity_required +def user_collection_list(request: AuthedHttpRequest, user_name): + target = request.target_identity + collections = Collection.objects.filter(owner=target).filter( + q_owned_piece_visible_to_user(request.user, target) + ) return render( request, "user_collection_list.html", { - "user": user, + "user": target.user, + "identity": target, "collections": collections, }, ) @login_required -def user_liked_collection_list(request, user_name): - user = User.get(user_name) - if user is None: - return render_user_not_found(request) - if user != request.user and ( - request.user.is_blocked_by(user) or request.user.is_blocking(user) - ): - return render_user_blocked(request) - collections = Collection.objects.filter(likes__owner=user) - if user != request.user: - collections = collections.filter(query_visible(request.user)) +@target_identity_required +def user_liked_collection_list(request: AuthedHttpRequest, user_name): + target = request.target_identity + collections = Collection.objects.filter(likes__owner=target) + if target.user != request.user: + collections = collections.filter(q_piece_visible_to_user(request.user)) return render( request, "user_collection_list.html", { - "user": user, + "user": target.user, + "identity": target, "collections": collections, "liked": True, }, diff --git a/journal/views/common.py b/journal/views/common.py index cb36aa36..10cc6dfe 100644 --- a/journal/views/common.py +++ b/journal/views/common.py @@ -6,9 +6,12 @@ from django.utils.translation import gettext_lazy as _ from catalog.models import * -from common.utils import PageLinksGenerator, get_uuid_or_404 -from users.models import User -from users.views import render_user_blocked, render_user_not_found +from common.utils import ( + AuthedHttpRequest, + PageLinksGenerator, + get_uuid_or_404, + target_identity_required, +) from ..forms import * from ..models import * @@ -41,42 +44,51 @@ def render_list_not_found(request): ) +@login_required +@target_identity_required def render_list( - request, user_name, type, shelf_type=None, item_category=None, tag_title=None + request: AuthedHttpRequest, + user_name, + type, + shelf_type=None, + item_category=None, + tag_title=None, ): - user = User.get(user_name) - if user is None: - return render_user_not_found(request) - if user != request.user and ( - request.user.is_blocked_by(user) or request.user.is_blocking(user) - ): - return render_user_blocked(request) + target = request.target_identity + viewer = request.user.identity tag = None if type == "mark": - queryset = user.shelf_manager.get_latest_members(shelf_type, item_category) + queryset = target.user.shelf_manager.get_latest_members( + shelf_type, item_category + ) elif type == "tagmember": - tag = Tag.objects.filter(owner=user, title=tag_title).first() + tag = Tag.objects.filter(owner=target, title=tag_title).first() if not tag: return render_list_not_found(request) - if tag.visibility != 0 and user != request.user: + if tag.visibility != 0 and target != viewer: return render_list_not_found(request) queryset = TagMember.objects.filter(parent=tag) - elif type == "review": - queryset = Review.objects.filter(owner=user) - queryset = queryset.filter(query_item_category(item_category)) + elif type == "review" and item_category: + queryset = 
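
`render_list` and the collection list views above now lean on `target_identity_required`, which resolves `user_name` to an `APIdentity`, applies the block/visibility checks each view used to inline, and exposes the result as `request.target_identity`. The real decorator lives in `common.utils`; its rough shape, with the lookup call assumed for illustration:

from functools import wraps

from django.http import Http404

from users.models.apidentity import APIdentity


def target_identity_required(view):
    @wraps(view)
    def wrapper(request, user_name, *args, **kwargs):
        identity = APIdentity.get_by_handle(user_name)  # assumed lookup API
        if identity is None:
            raise Http404()  # the real one renders user-not-found/blocked pages
        request.target_identity = identity
        return view(request, user_name, *args, **kwargs)

    return wrapper
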
Review.objects.filter(q_item_in_category(item_category)) else: raise BadRequest() - queryset = queryset.filter(q_visible_to(request.user, user)).order_by( - "-created_time" - ) + queryset = queryset.filter( + q_owned_piece_visible_to_user(request.user, target) + ).order_by("-created_time") paginator = Paginator(queryset, PAGE_SIZE) - page_number = request.GET.get("page", default=1) + page_number = int(request.GET.get("page", default=1)) members = paginator.get_page(page_number) pagination = PageLinksGenerator(PAGE_SIZE, page_number, paginator.num_pages) return render( request, f"user_{type}_list.html", - {"user": user, "members": members, "tag": tag, "pagination": pagination}, + { + "user": target.user, + "identity": target, + "members": members, + "tag": tag, + "pagination": pagination, + }, ) diff --git a/journal/views/mark.py b/journal/views/mark.py index b121e89d..758aa362 100644 --- a/journal/views/mark.py +++ b/journal/views/mark.py @@ -12,17 +12,17 @@ from django.utils.translation import gettext_lazy as _ from catalog.models import * -from common.utils import PageLinksGenerator, get_uuid_or_404 +from common.utils import AuthedHttpRequest, PageLinksGenerator, get_uuid_or_404 from mastodon.api import ( get_spoiler_text, get_status_id_by_url, get_visibility, post_toot, ) +from takahe.utils import Takahe -from ..forms import * -from ..models import * -from .common import render_list, render_relogin +from ..models import Comment, Mark, Piece, ShelfType, ShelfTypeNames, TagManager +from .common import render_list, render_relogin, target_identity_required _logger = logging.getLogger(__name__) PAGE_SIZE = 10 @@ -31,28 +31,30 @@ @login_required -def wish(request, item_uuid): +def wish(request: AuthedHttpRequest, item_uuid): if request.method != "POST": raise BadRequest() item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid)) if not item: raise Http404() - request.user.shelf_manager.move_item(item, ShelfType.WISHLIST) + request.user.identity.shelf_manager.move_item(item, ShelfType.WISHLIST) if request.GET.get("back"): - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) return HttpResponse(_checkmark) @login_required -def like(request, piece_uuid): +def like(request: AuthedHttpRequest, piece_uuid): if request.method != "POST": raise BadRequest() piece = get_object_or_404(Piece, uid=get_uuid_or_404(piece_uuid)) if not piece: raise Http404() - Like.user_like_piece(request.user, piece) + post = piece.latest_post + if post: + Takahe.like_post(post.pk, request.user.identity.pk) if request.GET.get("back"): - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) elif request.GET.get("stats"): return render( request, @@ -68,15 +70,17 @@ def like(request, piece_uuid): @login_required -def unlike(request, piece_uuid): +def unlike(request: AuthedHttpRequest, piece_uuid): if request.method != "POST": raise BadRequest() piece = get_object_or_404(Piece, uid=get_uuid_or_404(piece_uuid)) if not piece: raise Http404() - Like.user_unlike_piece(request.user, piece) + post = piece.latest_post + if post: + Takahe.unlike_post(post.pk, request.user.identity.pk) if request.GET.get("back"): - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) elif request.GET.get("stats"): return render( request, @@ -92,11 +96,11 @@ def unlike(request, piece_uuid): @login_required -def 
mark(request, item_uuid): +def mark(request: AuthedHttpRequest, item_uuid): item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid)) - mark = Mark(request.user, item) + mark = Mark(request.user.identity, item) if request.method == "GET": - tags = TagManager.get_item_tags_by_user(item, request.user) + tags = request.user.identity.tag_manager.get_item_tags(item) shelf_types = [ (n[1], n[2]) for n in iter(ShelfTypeNames) if n[0] == item.category ] @@ -115,15 +119,8 @@ def mark(request, item_uuid): ) elif request.method == "POST": if request.POST.get("delete", default=False): - silence = request.POST.get("silence", False) - mark.delete(silence=silence) - if ( - silence - ): # this means the mark is deleted from mark_history, thus redirect to item page - return redirect( - reverse("catalog:retrieve", args=[item.url_path, item.uuid]) - ) - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + mark.delete() + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) else: visibility = int(request.POST.get("visibility", default=0)) rating_grade = request.POST.get("rating_grade", default=0) @@ -143,7 +140,7 @@ def mark(request, item_uuid): ) if mark_date and mark_date >= timezone.now(): mark_date = None - TagManager.tag_item_by_user(item, request.user, tags, visibility) + TagManager.tag_item(item, request.user.identity, tags, visibility) try: mark.update( status, @@ -167,7 +164,7 @@ def mark(request, item_uuid): "secondary_msg": err, }, ) - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) raise BadRequest() @@ -202,12 +199,12 @@ def share_comment(user, item, text, visibility, shared_link=None, position=None) @login_required -def mark_log(request, item_uuid, log_id): +def mark_log(request: AuthedHttpRequest, item_uuid, log_id): """ Delete log of one item by log id. 
""" item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid)) - mark = Mark(request.user, item) + mark = Mark(request.user.identity, item) if request.method == "POST": if request.GET.get("delete", default=False): if log_id: @@ -219,7 +216,7 @@ def mark_log(request, item_uuid, log_id): @login_required -def comment(request, item_uuid): +def comment(request: AuthedHttpRequest, item_uuid): item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid)) if not item.class_name in ["podcastepisode", "tvepisode"]: raise BadRequest("不支持评论此类型的条目") @@ -246,7 +243,7 @@ def comment(request, item_uuid): if not comment: raise Http404() comment.delete() - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) visibility = int(request.POST.get("visibility", default=0)) text = request.POST.get("text") position = None @@ -302,12 +299,11 @@ def comment(request, item_uuid): # ) if post_error: return render_relogin(request) - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) raise BadRequest() -@login_required -def user_mark_list(request, user_name, shelf_type, item_category): +def user_mark_list(request: AuthedHttpRequest, user_name, shelf_type, item_category): return render_list( request, user_name, "mark", shelf_type=shelf_type, item_category=item_category ) diff --git a/journal/views/post.py b/journal/views/post.py new file mode 100644 index 00000000..cc10e615 --- /dev/null +++ b/journal/views/post.py @@ -0,0 +1,64 @@ +from django.contrib.auth.decorators import login_required +from django.core.exceptions import BadRequest, ObjectDoesNotExist, PermissionDenied +from django.shortcuts import get_object_or_404, redirect, render +from django.urls import reverse +from django.utils.translation import gettext_lazy as _ + +from common.utils import ( + AuthedHttpRequest, + PageLinksGenerator, + get_uuid_or_404, + target_identity_required, +) +from takahe.utils import Takahe + +from ..forms import * +from ..models import * + + +@login_required +def piece_replies(request: AuthedHttpRequest, piece_uuid: str): + piece = get_object_or_404(Piece, uid=get_uuid_or_404(piece_uuid)) + if not piece.is_visible_to(request.user): + raise PermissionDenied() + replies = piece.get_replies(request.user.identity) + return render( + request, "replies.html", {"post": piece.latest_post, "replies": replies} + ) + + +@login_required +def post_replies(request: AuthedHttpRequest, post_id: int): + replies = Takahe.get_replies_for_posts([post_id], request.user.identity.pk) + return render( + request, "replies.html", {"post": Takahe.get_post(post_id), "replies": replies} + ) + + +@login_required +def post_reply(request: AuthedHttpRequest, post_id: int): + content = request.POST.get("content", "").strip() + visibility = Takahe.Visibilities(int(request.POST.get("visibility", -1))) + if request.method != "POST" or not content: + raise BadRequest() + Takahe.reply_post(post_id, request.user.identity.pk, content, visibility) + replies = Takahe.get_replies_for_posts([post_id], request.user.identity.pk) + return render( + request, "replies.html", {"post": Takahe.get_post(post_id), "replies": replies} + ) + + +@login_required +def post_like(request: AuthedHttpRequest, post_id: int): + if request.method != "POST": + raise BadRequest() + Takahe.like_post(post_id, request.user.identity.pk) + return render(request, "action_like_post.html", {"post": Takahe.get_post(post_id)}) + + +@login_required +def 
post_unlike(request: AuthedHttpRequest, post_id: int): + if request.method != "POST": + raise BadRequest() + Takahe.unlike_post(post_id, request.user.identity.pk) + return render(request, "action_like_post.html", {"post": Takahe.get_post(post_id)}) diff --git a/journal/views/profile.py b/journal/views/profile.py index 04876050..046291e4 100644 --- a/journal/views/profile.py +++ b/journal/views/profile.py @@ -6,30 +6,27 @@ from user_messages import api as msg from catalog.models import * -from users.models import User +from common.utils import AuthedHttpRequest +from users.models import APIdentity, User from users.views import render_user_blocked, render_user_not_found from ..forms import * from ..models import * -from .common import render_list +from .common import render_list, target_identity_required -def profile(request, user_name): +@target_identity_required +def profile(request: AuthedHttpRequest, user_name): if request.method != "GET": raise BadRequest() - user = User.get(user_name, case_sensitive=True) - if user is None or not user.is_active: - return render_user_not_found(request) - if user.mastodon_acct != user_name and user.username != user_name: - return redirect(user.url) - if not request.user.is_authenticated and user.preference.no_anonymous_view: - return render(request, "users/home_anonymous.html", {"user": user}) - if user != request.user and ( - user.is_blocked_by(request.user) or user.is_blocking(request.user) - ): - return render_user_blocked(request) + target = request.target_identity + # if user.mastodon_acct != user_name and user.username != user_name: + # return redirect(user.url) + if not request.user.is_authenticated and target.preference.no_anonymous_view: + return render(request, "users/home_anonymous.html", {"user": target.user}) + me = target.user == request.user - qv = q_visible_to(request.user, user) + qv = q_owned_piece_visible_to_user(request.user, target) shelf_list = {} visbile_categories = [ ItemCategory.Book, @@ -43,9 +40,9 @@ def profile(request, user_name): for category in visbile_categories: shelf_list[category] = {} for shelf_type in ShelfType: - label = user.shelf_manager.get_label(shelf_type, category) + label = target.shelf_manager.get_label(shelf_type, category) if label is not None: - members = user.shelf_manager.get_latest_members( + members = target.shelf_manager.get_latest_members( shelf_type, category ).filter(qv) shelf_list[category][shelf_type] = { @@ -53,35 +50,33 @@ def profile(request, user_name): "count": members.count(), "members": members[:10].prefetch_related("item"), } - reviews = ( - Review.objects.filter(owner=user) - .filter(qv) - .filter(query_item_category(category)) - .order_by("-created_time") + reviews = Review.objects.filter(q_item_in_category(category)).order_by( + "-created_time" ) shelf_list[category]["reviewed"] = { "title": "评论过的" + category.label, "count": reviews.count(), "members": reviews[:10].prefetch_related("item"), } - collections = ( - Collection.objects.filter(owner=user).filter(qv).order_by("-created_time") - ) + collections = Collection.objects.filter(qv).order_by("-created_time") liked_collections = ( - Like.user_likes_by_class(user, Collection) + Like.user_likes_by_class(target, Collection) .order_by("-edited_time") .values_list("target_id", flat=True) ) - if user != request.user: - liked_collections = liked_collections.filter(query_visible(request.user)) - top_tags = user.tag_manager.public_tags[:10] + if not me: + liked_collections = liked_collections.filter( + q_piece_visible_to_user(request.user) + 
) + top_tags = target.tag_manager.public_tags[:10] else: - top_tags = user.tag_manager.all_tags[:10] + top_tags = target.tag_manager.all_tags[:10] return render( request, "profile.html", { - "user": user, + "user": target.user, + "identity": target, "top_tags": top_tags, "shelf_list": shelf_list, "collections": collections[:10], @@ -91,19 +86,20 @@ def profile(request, user_name): for i in liked_collections.order_by("-edited_time")[:10] ], "liked_collections_count": liked_collections.count(), - "layout": user.preference.profile_layout, + "layout": target.preference.profile_layout, }, ) def user_calendar_data(request, user_name): - if request.method != "GET": + if request.method != "GET" or not request.user.is_authenticated: raise BadRequest() - user = User.get(user_name) - if user is None or not request.user.is_authenticated: - return HttpResponse("") - max_visiblity = max_visiblity_to(request.user, user) - calendar_data = user.shelf_manager.get_calendar_data(max_visiblity) + try: + target = APIdentity.get_by_handler(user_name) + except: + return HttpResponse("unavailable") + max_visiblity = max_visiblity_to_user(request.user, target) + calendar_data = target.shelf_manager.get_calendar_data(max_visiblity) return render( request, "calendar_data.html", diff --git a/journal/views/review.py b/journal/views/review.py index 52904779..adce1029 100644 --- a/journal/views/review.py +++ b/journal/views/review.py @@ -12,9 +12,11 @@ from django.utils.translation import gettext_lazy as _ from catalog.models import * -from common.utils import PageLinksGenerator, get_uuid_or_404 +from common.utils import AuthedHttpRequest, PageLinksGenerator, get_uuid_or_404 from journal.models.renderers import convert_leading_space_in_md, render_md +from mastodon.api import share_review from users.models import User +from users.models.apidentity import APIdentity from ..forms import * from ..models import * @@ -32,7 +34,7 @@ def review_retrieve(request, review_uuid): @login_required -def review_edit(request, item_uuid, review_uuid=None): +def review_edit(request: AuthedHttpRequest, item_uuid, review_uuid=None): item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid)) review = ( get_object_or_404(Review, uid=get_uuid_or_404(review_uuid)) @@ -65,24 +67,28 @@ def review_edit(request, item_uuid, review_uuid=None): if form.is_valid(): mark_date = None if request.POST.get("mark_anotherday"): - dt = parse_datetime(request.POST.get("mark_date") + " 20:00:00") + dt = parse_datetime(request.POST.get("mark_date", "") + " 20:00:00") mark_date = ( dt.replace(tzinfo=timezone.get_current_timezone()) if dt else None ) body = form.instance.body if request.POST.get("leading_space"): body = convert_leading_space_in_md(body) - review = Review.review_item_by_user( + review = Review.update_item_review( item, - request.user, + request.user.identity, form.cleaned_data["title"], body, form.cleaned_data["visibility"], mark_date, - form.cleaned_data["share_to_mastodon"], ) if not review: raise BadRequest() + if ( + form.cleaned_data["share_to_mastodon"] + and request.user.mastodon_username + ): + share_review(review) return redirect(reverse("journal:review_retrieve", args=[review.uuid])) else: raise BadRequest() @@ -90,7 +96,6 @@ def review_edit(request, item_uuid, review_uuid=None): raise BadRequest() -@login_required def user_review_list(request, user_name, item_category): return render_list(request, user_name, "review", item_category=item_category) @@ -100,16 +105,16 @@ def user_review_list(request, user_name, item_category): class 
ReviewFeed(Feed): def get_object(self, request, id): - return User.get(id) + return APIdentity.get_by_handler(id) - def title(self, user): - return "%s的评论" % user.display_name if user else "无效链接" + def title(self, owner): + return "%s的评论" % owner.display_name if owner else "无效链接" - def link(self, user): - return user.url if user else settings.SITE_INFO["site_url"] + def link(self, owner): + return owner.url if owner else settings.SITE_INFO["site_url"] - def description(self, user): - return "%s的评论合集 - NeoDB" % user.display_name if user else "无效链接" + def description(self, owner): + return "%s的评论合集 - NeoDB" % owner.display_name if owner else "无效链接" def items(self, user): if user is None or user.preference.no_anonymous_view: diff --git a/journal/views/tag.py b/journal/views/tag.py index b2847349..38c6ec73 100644 --- a/journal/views/tag.py +++ b/journal/views/tag.py @@ -13,29 +13,25 @@ from ..forms import * from ..models import * -from .common import render_list +from .common import render_list, target_identity_required PAGE_SIZE = 10 @login_required +@target_identity_required def user_tag_list(request, user_name): - user = User.get(user_name) - if user is None: - return render_user_not_found(request) - if user != request.user and ( - request.user.is_blocked_by(user) or request.user.is_blocking(user) - ): - return render_user_blocked(request) - tags = Tag.objects.filter(owner=user) - if user != request.user: + target = request.target_identity + tags = Tag.objects.filter(owner=target) + if target.user != request.user: tags = tags.filter(visibility=0) tags = tags.values("title").annotate(total=Count("members")).order_by("-total") return render( request, "user_tag_list.html", { - "user": user, + "user": target.user, + "identity": target, "tags": tags, }, ) @@ -47,7 +43,7 @@ def user_tag_edit(request): tag_title = Tag.cleanup_title(request.GET.get("tag", ""), replace=False) if not tag_title: raise Http404() - tag = Tag.objects.filter(owner=request.user, title=tag_title).first() + tag = Tag.objects.filter(owner=request.user.identity, title=tag_title).first() if not tag: raise Http404() return render(request, "tag_edit.html", {"tag": tag}) @@ -55,7 +51,7 @@ def user_tag_edit(request): tag_title = Tag.cleanup_title(request.POST.get("title", ""), replace=False) tag_id = request.POST.get("id") tag = ( - Tag.objects.filter(owner=request.user, id=tag_id).first() + Tag.objects.filter(owner=request.user.identity, id=tag_id).first() if tag_id else None ) @@ -70,7 +66,9 @@ ) elif ( tag_title != tag.title - and Tag.objects.filter(owner=request.user, title=tag_title).exists() + and Tag.objects.filter( + owner=request.user.identity, title=tag_title + ).exists() ): msg.error(request.user, _("标签已存在")) return HttpResponseRedirect(request.META.get("HTTP_REFERER")) @@ -88,6 +86,5 @@ raise BadRequest() -@login_required def user_tag_member_list(request, user_name, tag_title): return render_list(request, user_name, "tagmember", tag_title=tag_title) diff --git a/mastodon/api.py b/mastodon/api.py index 5effcf70..6e88b0b1 100644 --- a/mastodon/api.py +++ b/mastodon/api.py @@ -1,5 +1,5 @@ import functools -import logging +import html import random import re import string @@ -67,6 +67,56 @@ def get_api_domain(domain): # low level api below +def boost_toot(site, token, toot_url): + domain = get_api_domain(site) + headers = { + "User-Agent": USER_AGENT, + "Authorization": f"Bearer {token}", + } + url = ( + "https://" + + domain + + API_SEARCH + + "?type=statuses&resolve=true&q="
+ + quote(toot_url) + ) + try: + response = get(url, headers=headers) + if response.status_code != 200: + logger.error(f"Error search {toot_url} on {domain} {response.status_code}") + return None + j = response.json() + if "statuses" in j and len(j["statuses"]) > 0: + s = j["statuses"][0] + if s["uri"] != toot_url and s["url"] != toot_url: + logger.error( + f"Error status url mismatch {s['uri']} or {s['url']} != {toot_url}" + ) + return None + if s["reblogged"]: + logger.info(f"Already boosted {toot_url}") + # TODO unboost and boost again? + return None + url = ( + "https://" + + domain + + API_PUBLISH_TOOT + + "/" + + j["statuses"][0]["id"] + + "/reblog" + ) + response = post(url, headers=headers) + if response.status_code != 200: + logger.error( + f"Error boosting {toot_url} on {domain} {response.status_code}" + ) + return None + return response.json() + except Exception: + logger.error(f"Error boosting {toot_url} on {domain}") + return None + + def post_toot( site, content, @@ -193,7 +243,7 @@ def detect_server_info(login_domain): try: response = get(url, headers={"User-Agent": USER_AGENT}) except Exception as e: - logger.error(f"Error connecting {login_domain} {e}") + logger.error(f"Error connecting {login_domain}: {e}") raise Exception(f"无法连接 {login_domain}") if response.status_code != 200: logger.error(f"Error connecting {login_domain}: {response.status_code}") @@ -363,7 +413,7 @@ def get_visibility(visibility, user): def share_mark(mark): from catalog.common import ItemCategory - user = mark.owner + user = mark.owner.user if mark.visibility == 2: visibility = TootVisibilityEnum.DIRECT elif mark.visibility == 1: @@ -466,10 +516,10 @@ def share_collection(collection, comment, user, visibility_no): ) user_str = ( "我" - if user == collection.owner + if user == collection.owner.user else ( - " @" + collection.owner.mastodon_acct + " " - if collection.owner.mastodon_acct + " @" + collection.owner.user.mastodon_acct + " " + if collection.owner.user.mastodon_acct else " " + collection.owner.username + " " ) ) diff --git a/misc/bin/neodb-hello b/misc/bin/neodb-hello new file mode 100755 index 00000000..17931bba --- /dev/null +++ b/misc/bin/neodb-hello @@ -0,0 +1,21 @@ +#!/bin/sh +echo '\033[0;35m====== Welcome to NeoDB ======\033[0m' +cat /neodb/version +echo Your configuration is for ${NEODB_SITE_NAME} on ${NEODB_SITE_DOMAIN} +[[ -z "${NEODB_DEBUG}" ]] || echo DEBUG is ON, showing environment variables: +[[ -z "${NEODB_DEBUG}" ]] || env +[[ -z "${NEODB_DEBUG}" ]] || echo Running some basic checks... +[[ -z "${NEODB_DEBUG}" ]] || neodb-manage check +[[ -z "${NEODB_DEBUG}" ]] || TAKAHE_DATABASE_SERVER="postgres://x@y/z" TAKAHE_SECRET_KEY="t" TAKAHE_MAIN_DOMAIN="x.y" takahe-manage check +[[ -z "${NEODB_DEBUG}" ]] || echo check complete. +cat <<EOF +start NeoDB instance: docker compose --profile up -d +stop NeoDB instance: docker compose --profile down -d +update NeoDB instance: docker compose --profile pull + +Please follow instructions on https://neodb.net to configure and run your instance. + +EOF diff --git a/misc/bin/neodb-init b/misc/bin/neodb-init new file mode 100755 index 00000000..278d6a5e --- /dev/null +++ b/misc/bin/neodb-init @@ -0,0 +1,14 @@ +#!/bin/sh +echo '\033[0;35m====== Welcome to NeoDB ======\033[0m' +cat /neodb/version +echo Your configuration is for ${NEODB_SITE_NAME} on ${NEODB_SITE_DOMAIN} +[[ -z "${NEODB_DEBUG}" ]] || echo DEBUG is ON, showing environment: +[[ -z "${NEODB_DEBUG}" ]] || env +echo +echo NeoDB initializing... + +takahe-manage migrate || exit $?
+neodb-manage migrate || exit $? +neodb-manage cron --schedule || exit $? + +echo NeoDB initialization complete. diff --git a/misc/bin/neodb-manage b/misc/bin/neodb-manage new file mode 100755 index 00000000..86a946f2 --- /dev/null +++ b/misc/bin/neodb-manage @@ -0,0 +1,2 @@ +#!/bin/sh +cd /neodb && ${NEODB_VENV}/bin/python manage.py $@ diff --git a/misc/bin/nginx-start b/misc/bin/nginx-start new file mode 100755 index 00000000..d2148be7 --- /dev/null +++ b/misc/bin/nginx-start @@ -0,0 +1,4 @@ +#!/bin/sh +chown app:app /www/media /www/m +envsubst '${NEODB_WEB_SERVER} ${TAKAHE_WEB_SERVER}' < $NGINX_CONF > /etc/nginx/conf.d/neodb.conf +nginx -g 'daemon off;' diff --git a/misc/bin/takahe-manage b/misc/bin/takahe-manage new file mode 100755 index 00000000..79dd4922 --- /dev/null +++ b/misc/bin/takahe-manage @@ -0,0 +1,2 @@ +#!/bin/sh +cd /takahe && ${TAKAHE_VENV}/bin/python manage.py $@ diff --git a/misc/dev-reset.sh b/misc/dev-reset.sh deleted file mode 100755 index ecfd89b0..00000000 --- a/misc/dev-reset.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/sh -# Reset databases and migrations, for development only - -[ -f manage.py ] || exit $1 - -echo "\033[0;31mWARNING: this script will destroy all neodb databases and migrations" -while true; do - read -p "Do you wish to continue? (yes/no) " yn - case $yn in - [Yy]* ) break;; - [Nn]* ) exit;; - esac -done - -psql $* postgres -c "DROP DATABASE IF EXISTS neodb;" || exit $? - -psql $* postgres -c "DROP DATABASE IF EXISTS test_neodb;" || exit $? - -psql $* postgres -c "CREATE DATABASE neodb ENCODING 'UTF8' LC_COLLATE='en_US.UTF-8' LC_CTYPE='en_US.UTF-8' TEMPLATE template0;" || exit $? - -find -type d -name migrations | xargs rm -rf - -python3 manage.py makemigrations mastodon users management common catalog journal social legacy - -python3 manage.py migrate || exit $? - -psql $* neodb -c "CREATE DATABASE test_neodb WITH TEMPLATE neodb;" || exit $? - -python3 manage.py check diff --git a/misc/nginx.conf.d/neodb-dev.conf b/misc/nginx.conf.d/neodb-dev.conf new file mode 100644 index 00000000..7050f90f --- /dev/null +++ b/misc/nginx.conf.d/neodb-dev.conf @@ -0,0 +1,99 @@ +proxy_cache_path /www/cache levels=1:2 keys_zone=takahe:20m inactive=14d max_size=1g; + +upstream neodb { + server ${NEODB_WEB_SERVER}; +} + +upstream takahe { + server ${TAKAHE_WEB_SERVER}; +} + +server { + listen 8000; + + charset utf-8; + ignore_invalid_headers on; + client_max_body_size 100M; + client_body_buffer_size 128k; + proxy_connect_timeout 900; + proxy_set_header Host $http_host; + proxy_set_header X-Forwarded-Proto https; + proxy_http_version 1.1; + proxy_hide_header X-Takahe-User; + proxy_hide_header X-Takahe-Identity; + + # allow admin to serv their own robots.txt/favicon.ico/... 
+ location ~ ^/\w+\.\w+$ { + root /www/root; + access_log off; + log_not_found off; + } + location /m/ { + alias /www/m/; + add_header Cache-Control "public, max-age=604800, immutable"; + } + # Proxies media and remote media with caching + location ~* ^/(media|proxy) { + # Cache media and proxied resources + proxy_cache takahe; + proxy_cache_key $host$uri; + proxy_cache_valid 200 304 4h; + proxy_cache_valid 301 307 4h; + proxy_cache_valid 500 502 503 504 0s; + proxy_cache_valid any 1h; + add_header X-Cache $upstream_cache_status; + + # Signal to Takahē that we support full URI accel proxying + proxy_set_header X-Takahe-Accel true; + proxy_pass http://takahe; + } + # Internal target for X-Accel redirects that stashes the URI in a var + location /__takahe_accel__/ { + internal; + set $takahe_realuri $upstream_http_x_takahe_realuri; + rewrite ^/(.+) /__takahe_accel__/real/; + } + # Real internal-only target for X-Accel redirects + location /__takahe_accel__/real/ { + # Only allow internal redirects + internal; + + # # Reconstruct the remote URL + resolver 9.9.9.9 8.8.8.8 valid=300s; + + # Unset Authorization and Cookie for security reasons. + proxy_set_header Authorization ''; + proxy_set_header Cookie ''; + proxy_set_header User-Agent 'takahe/nginx'; + proxy_set_header Host $proxy_host; + proxy_set_header X-Forwarded-For ''; + proxy_set_header X-Forwarded-Host ''; + proxy_set_header X-Forwarded-Server ''; + proxy_set_header X-Real-Ip ''; + + # Stops the local disk from being written to (just forwards data through) + proxy_max_temp_file_size 0; + + # Proxy the remote file through to the client + proxy_pass $takahe_realuri; + proxy_ssl_server_name on; + add_header X-Takahe-Accel "HIT"; + + # Cache these responses too + proxy_cache takahe; + # Cache after a single request + proxy_cache_min_uses 1; + proxy_cache_key $takahe_realuri; + proxy_cache_valid 200 304 720h; + proxy_cache_valid 301 307 12h; + proxy_cache_valid 500 502 503 504 0s; + proxy_cache_valid any 72h; + add_header X-Cache $upstream_cache_status; + } + location ~* ^/(static|@|\.well-known|actor|inbox|api/v1|api/v2|auth|oauth|tags|settings|media|proxy|admin|djadmin) { + proxy_pass http://takahe; + } + location / { + proxy_pass http://neodb; + } +} diff --git a/misc/nginx.conf.d/neodb.conf b/misc/nginx.conf.d/neodb.conf index 42e8e001..4293b2a2 100644 --- a/misc/nginx.conf.d/neodb.conf +++ b/misc/nginx.conf.d/neodb.conf @@ -1,22 +1,107 @@ +proxy_cache_path /www/cache levels=1:2 keys_zone=takahe:20m inactive=14d max_size=1g; + +upstream neodb { + server ${NEODB_WEB_SERVER}; +} + +upstream takahe { + server ${TAKAHE_WEB_SERVER}; +} + server { - server_name neodb.social; listen 8000; - location = /favicon.ico { - root /www; - access_log off; log_not_found off; + + charset utf-8; + ignore_invalid_headers on; + client_max_body_size 100M; + client_body_buffer_size 128k; + proxy_connect_timeout 900; + proxy_set_header Host $http_host; + proxy_set_header X-Forwarded-Proto https; + proxy_http_version 1.1; + proxy_hide_header X-Takahe-User; + proxy_hide_header X-Takahe-Identity; + + # allow admin to serv their own robots.txt/favicon.ico/... 
+ location ~ ^/\w+\.\w+$ { + root /www/root; + access_log off; + log_not_found off; } - location / { - client_max_body_size 100M; - proxy_set_header Host $http_host; - proxy_set_header X-Forwarded-Proto https; - proxy_pass http://neodb-web:8000; + location /static/ { + alias /takahe/static-collected/; + add_header Cache-Control "public, max-age=604800, immutable"; + } + location /s/ { + alias /neodb/static/; + add_header Cache-Control "public, max-age=604800, immutable"; } + location /m/ { + alias /www/m/; + add_header Cache-Control "public, max-age=604800, immutable"; + } + # Proxies media and remote media with caching + location ~* ^/(media|proxy) { + # Cache media and proxied resources + proxy_cache takahe; + proxy_cache_key $host$uri; + proxy_cache_valid 200 304 4h; + proxy_cache_valid 301 307 4h; + proxy_cache_valid 500 502 503 504 0s; + proxy_cache_valid any 1h; + add_header X-Cache $upstream_cache_status; - location /static/ { - root /www; + # Signal to Takahē that we support full URI accel proxying + proxy_set_header X-Takahe-Accel true; + proxy_pass http://takahe; + } + # Internal target for X-Accel redirects that stashes the URI in a var + location /__takahe_accel__/ { + internal; + set $takahe_realuri $upstream_http_x_takahe_realuri; + rewrite ^/(.+) /__takahe_accel__/real/; } + # Real internal-only target for X-Accel redirects + location /__takahe_accel__/real/ { + # Only allow internal redirects + internal; + + # # Reconstruct the remote URL + resolver 9.9.9.9 8.8.8.8 valid=300s; + + # Unset Authorization and Cookie for security reasons. + proxy_set_header Authorization ''; + proxy_set_header Cookie ''; + proxy_set_header User-Agent 'takahe/nginx'; + proxy_set_header Host $proxy_host; + proxy_set_header X-Forwarded-For ''; + proxy_set_header X-Forwarded-Host ''; + proxy_set_header X-Forwarded-Server ''; + proxy_set_header X-Real-Ip ''; - location /media/ { - root /www; + # Stops the local disk from being written to (just forwards data through) + proxy_max_temp_file_size 0; + + # Proxy the remote file through to the client + proxy_pass $takahe_realuri; + proxy_ssl_server_name on; + add_header X-Takahe-Accel "HIT"; + + # Cache these responses too + proxy_cache takahe; + # Cache after a single request + proxy_cache_min_uses 1; + proxy_cache_key $takahe_realuri; + proxy_cache_valid 200 304 720h; + proxy_cache_valid 301 307 12h; + proxy_cache_valid 500 502 503 504 0s; + proxy_cache_valid any 72h; + add_header X-Cache $upstream_cache_status; + } + location ~* ^/(@|\.well-known|actor|inbox|api/v1|api/v2|auth|oauth|tags|settings|media|proxy|admin|djadmin) { + proxy_pass http://takahe; + } + location / { + proxy_pass http://neodb; } } diff --git a/misc/upgrade.sh b/misc/upgrade.sh deleted file mode 100755 index 9887bb28..00000000 --- a/misc/upgrade.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/sh -[ -f manage.py ] || exit -echo Dry Run MakeMigrations: -python3 manage.py makemigrations --dry-run || exit $? -echo Planned Migrations: -python3 manage.py migrate --plan || exit $? - -while true; do - read -p "Do you wish to continue? (yes/no) " yn - case $yn in - [Yy]* ) break;; - [Nn]* ) exit;; - esac -done - -echo "Generating static files..." -python3 manage.py compilescss || exit $? -python3 manage.py collectstatic --noinput || exit $? - -echo "Migrating database..." -python3 manage.py migrate || exit $? - -echo "Checking..." -python3 manage.py check || exit $? - -echo "Done. 
You may reload app, worker and cron" diff --git a/neodb-takahe b/neodb-takahe new file mode 160000 index 00000000..25ead63f --- /dev/null +++ b/neodb-takahe @@ -0,0 +1 @@ +Subproject commit 25ead63fb1cacb34cf3f8a2e0706843636f78034 diff --git a/neodb.env.dist b/neodb.env.dist deleted file mode 100644 index ecb44ed3..00000000 --- a/neodb.env.dist +++ /dev/null @@ -1,6 +0,0 @@ -NEODB_SECRET_KEY=change_me -NEODB_SITE_NAME=Example Site -NEODB_SITE_DOMAIN=example.site -#NEODB_PORT=8000 -#NEODB_SSL=1 -#NEODB_DATA=/var/lib/neodb diff --git a/neodb.env.example b/neodb.env.example new file mode 100644 index 00000000..b89eba52 --- /dev/null +++ b/neodb.env.example @@ -0,0 +1,36 @@ +# NEODB Configuration + +# copy along with compose.yml, rename this file to .env + +# Change these before start the instance for the first time!! +NEODB_SECRET_KEY=change_me +NEODB_SITE_NAME=Example Site +NEODB_SITE_DOMAIN=example.site + +# Change these too +NEODB_SITE_INTRO=/welcome.html +NEODB_SITE_LOGO=/logo.png +NEODB_SITE_ICON=/icon.png +NEODB_SITE_LINKS=@NiceDB=https://donotban.com/@testie,@NeoDB=https://mastodon.social/@neodb + +# Turn off DEBUG only when you are ready for production service +# NEODB_DEBUG=False + +# HTTP port your reverse proxy should send request to +# NEODB_PORT=8000 + +# Path to store db/media/cache/etc, must be writable +# NEODB_DATA=/var/lib/neodb + +# Users with these names will be promoted to admin during next boot/migration +# NEODB_ADMIN_USERNAMES = eggplant,aubergine + +# Scaling parameters +# NEODB_WEB_WORKER_NUM=32 +# TAKAHE_WEB_WORKER_NUM=32 +# TAKAHE_STATOR_CONCURRENCY=10 +# TAKAHE_STATOR_CONCURRENCY_PER_MODEL=10 + +# pull NeoDB Docker image from a specific tag +# see available tags: https://hub.docker.com/r/neodb/neodb/tags +# NEODB_IMAGE=neodb/neodb:main diff --git a/pyproject.toml b/pyproject.toml index b242d65f..cd6bdd9a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [tool.pyright] -exclude = [ "media", ".venv", ".git", "playground", "**/tests.py", "neodb", "**/migrations", "**/commands", "**/sites/douban_*" ] +exclude = [ "media", ".venv", ".git", "playground", "catalog/*/tests.py", "neodb", "**/migrations", "**/sites/douban_*", "neodb-takahe" ] [tool.djlint] ignore="T002,T003,H006,H019,H020,H021,H023,H030,H031" diff --git a/requirements-dev.txt b/requirements-dev.txt index b146c739..da3c30f8 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -4,5 +4,6 @@ django-debug-toolbar django-stubs djlint~=1.32.1 isort~=5.12.0 +lxml-stubs pre-commit -pyright==1.1.322 +pyright==1.1.332 diff --git a/requirements.txt b/requirements.txt index 4216648f..0c6f7243 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,12 +1,13 @@ +cachetools dateparser discord.py -django~=4.2.4 +django~=4.2.6 django-anymail -django-auditlog -django-auditlog @ git+https://github.com/jazzband/django-auditlog.git@45591463e8192b4ac0095e259cc4dcea0ac2fd6c +django-auditlog>=3.0.0-beta.2 django-bleach django-compressor django-cors-headers +django-environ django-hijack django-jsonform django-maintenance-mode @@ -24,8 +25,8 @@ django-user-messages dnspython easy-thumbnails filetype -fontawesomefree gunicorn +httpx igdb-api-v4 libsass listparser @@ -33,7 +34,6 @@ loguru lxml markdownify mistune -opencc openpyxl podcastparser psycopg2-binary @@ -42,3 +42,4 @@ rq>=1.12.0 setproctitle tqdm typesense +urlman diff --git a/social/migrations/0007_alter_localactivity_owner.py b/social/migrations/0007_alter_localactivity_owner.py new file mode 100644 index 00000000..f7e3176b --- /dev/null +++ 
b/social/migrations/0007_alter_localactivity_owner.py @@ -0,0 +1,22 @@ +# Generated by Django 4.2.4 on 2023-08-09 13:26 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("users", "0012_apidentity"), + ("social", "0006_alter_localactivity_template"), + ] + + operations = [ + migrations.AlterField( + model_name="localactivity", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="users.apidentity" + ), + ), + ] diff --git a/social/models.py b/social/models.py index f0e4190e..c89197e8 100644 --- a/social/models.py +++ b/social/models.py @@ -27,7 +27,7 @@ ShelfMember, UserOwnedObjectMixin, ) -from users.models import User +from users.models import APIdentity _logger = logging.getLogger(__name__) @@ -42,10 +42,8 @@ class ActivityTemplate(models.TextChoices): class LocalActivity(models.Model, UserOwnedObjectMixin): - owner = models.ForeignKey(User, on_delete=models.CASCADE) - visibility = models.PositiveSmallIntegerField( - default=0 - ) # 0: Public / 1: Follower only / 2: Self only + owner = models.ForeignKey(APIdentity, on_delete=models.CASCADE) # type: ignore + visibility = models.PositiveSmallIntegerField(default=0) # type: ignore template = models.CharField( blank=False, choices=ActivityTemplate.choices, max_length=50 ) @@ -62,11 +60,11 @@ def __str__(self): class ActivityManager: - def __init__(self, user): - self.owner = user + def __init__(self, owner: APIdentity): + self.owner = owner def get_timeline(self, before_time=None): - following = [x for x in self.owner.following if x not in self.owner.ignoring] + following = [x for x in self.owner.following if x not in self.owner.muting] q = Q(owner_id__in=following, visibility__lt=2) | Q(owner=self.owner) if before_time: q = q & Q(created_time__lt=before_time) @@ -205,5 +203,5 @@ def updated(self): super().updated() -def reset_social_visibility_for_user(user: User, visibility: int): - LocalActivity.objects.filter(owner=user).update(visibility=visibility) +def reset_social_visibility_for_user(owner: APIdentity, visibility: int): + LocalActivity.objects.filter(owner=owner).update(visibility=visibility) diff --git a/social/templates/activity/comment_child_item.html b/social/templates/activity/comment_child_item.html index a1d92c68..c048b3d2 100644 --- a/social/templates/activity/comment_child_item.html +++ b/social/templates/activity/comment_child_item.html @@ -53,7 +53,7 @@ {% endif %} - +
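The social/models.py hunk above rebuilds ActivityManager.get_timeline around APIdentity, dropping the old ignoring list in favour of Takahe-backed muting. A minimal sketch of the resulting query composition, assuming (as in the hunk) that owner.following and owner.muting yield identity pks and that LocalActivity carries the owner/visibility/created_time fields shown in the migration above:

```python
from django.db.models import Q

from social.models import LocalActivity  # model as altered by the migration above


def timeline_query(owner, before_time=None):
    # Followed identities, with anyone the owner mutes filtered out
    following = [pk for pk in owner.following if pk not in owner.muting]
    # Public and followers-only activity from them, plus all of the owner's own
    q = Q(owner_id__in=following, visibility__lt=2) | Q(owner=owner)
    if before_time:
        q &= Q(created_time__lt=before_time)  # pagination cursor for infinite scroll
    return LocalActivity.objects.filter(q).order_by("-created_time")
```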
diff --git a/social/templates/activity/mark_item.html b/social/templates/activity/mark_item.html index 083ffd2b..8a52f79a 100644 --- a/social/templates/activity/mark_item.html +++ b/social/templates/activity/mark_item.html @@ -40,7 +40,7 @@ {% endif %} - +
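Several journal view hunks earlier in this patch (collection lists, render_list, profile) replace ad-hoc block/follow checks with q_owned_piece_visible_to_user. The helper's body is not part of this diff; below is a hypothetical sketch of such a predicate, assuming the 0 public / 1 followers-only / 2 self-only convention noted in the social/models.py hunk above:

```python
from django.db.models import Q


def q_owned_piece_visible_to_user(viewer, owner):
    # Hypothetical reconstruction -- the real helper lives in journal.models
    # and is only referenced, not defined, in this patch.
    if viewer.is_authenticated and viewer.identity == owner:
        return Q(owner=owner)  # owners always see their own pieces
    if viewer.is_authenticated and viewer.identity.is_following(owner):
        return Q(owner=owner, visibility__in=[0, 1])  # public + followers-only
    return Q(owner=owner, visibility=0)  # strangers and anonymous: public only
```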
diff --git a/social/templates/activity/review_item.html b/social/templates/activity/review_item.html index 277d0a83..3092cd71 100644 --- a/social/templates/activity/review_item.html +++ b/social/templates/activity/review_item.html @@ -33,7 +33,7 @@ {% endif %} - +
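For the new boost_toot helper added to mastodon/api.py earlier in this patch: it resolves the status URL through the instance's search API (with resolve=true), then POSTs to the /reblog endpoint of the matched status. A usage sketch; the token attribute name here is illustrative and not part of this patch:

```python
from mastodon.api import boost_toot

# Boost a remote status on behalf of a user, via their home instance.
result = boost_toot(
    site="mastodon.social",                     # the user's home instance
    token=user.mastodon_token,                  # assumed: stored OAuth bearer token
    toot_url="https://example.org/@alice/111",  # status to search for and reblog
)
if result is None:
    # Covers search miss, URL mismatch, already-boosted, and HTTP errors;
    # the helper logs the specific cause itself.
    pass
```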
diff --git a/social/templates/feed.html b/social/templates/feed.html index a17cb8be..7f9b9bfa 100644 --- a/social/templates/feed.html +++ b/social/templates/feed.html @@ -31,7 +31,7 @@
好友动态
- {% include "_sidebar.html" with show_progress=1 %} + {% include "_sidebar.html" with show_progress=1 identity=request.user.identity %}
{% include "_footer.html" %} diff --git a/social/tests.py b/social/tests.py index 3d6093f2..b881977e 100644 --- a/social/tests.py +++ b/social/tests.py @@ -2,65 +2,86 @@ from catalog.models import * from journal.models import * +from takahe.utils import Takahe from users.models import User from .models import * class SocialTest(TestCase): + databases = "__all__" + def setUp(self): self.book1 = Edition.objects.create(title="Hyperion") self.book2 = Edition.objects.create(title="Andymion") self.movie = Edition.objects.create(title="Fight Club") - self.alice = User.register(mastodon_site="MySpace", mastodon_username="Alice") - self.bob = User.register(mastodon_site="KKCity", mastodon_username="Bob") + self.alice = User.register( + username="Alice", mastodon_site="MySpace", mastodon_username="Alice" + ) + self.bob = User.register( + username="Bob", mastodon_site="KKCity", mastodon_username="Bob" + ) def test_timeline(self): + alice_feed = self.alice.identity.activity_manager + bob_feed = self.bob.identity.activity_manager + # alice see 0 activity in timeline in the beginning - timeline = self.alice.activity_manager.get_timeline() - self.assertEqual(len(timeline), 0) + self.assertEqual(len(alice_feed.get_timeline()), 0) # 1 activity after adding first book to shelf - self.alice.shelf_manager.move_item(self.book1, ShelfType.WISHLIST, visibility=1) - timeline = self.alice.activity_manager.get_timeline() - self.assertEqual(len(timeline), 1) + self.alice.identity.shelf_manager.move_item( + self.book1, ShelfType.WISHLIST, visibility=1 + ) + self.assertEqual(len(alice_feed.get_timeline()), 1) # 2 activities after adding second book to shelf - self.alice.shelf_manager.move_item(self.book2, ShelfType.WISHLIST) - timeline = self.alice.activity_manager.get_timeline() - self.assertEqual(len(timeline), 2) + self.alice.identity.shelf_manager.move_item(self.book2, ShelfType.WISHLIST) + self.assertEqual(len(alice_feed.get_timeline()), 2) # 2 activities after change first mark - self.alice.shelf_manager.move_item(self.book1, ShelfType.PROGRESS) - timeline = self.alice.activity_manager.get_timeline() - self.assertEqual(len(timeline), 2) + self.alice.identity.shelf_manager.move_item(self.book1, ShelfType.PROGRESS) + self.assertEqual(len(alice_feed.get_timeline()), 2) # bob see 0 activity in timeline in the beginning - timeline2 = self.bob.activity_manager.get_timeline() - self.assertEqual(len(timeline2), 0) + self.assertEqual(len(bob_feed.get_timeline()), 0) # bob follows alice, see 2 activities - self.bob.mastodon_following = ["Alice@MySpace"] - self.alice.mastodon_follower = ["Bob@KKCity"] - self.bob.merge_relationships() - timeline2 = self.bob.activity_manager.get_timeline() - self.assertEqual(len(timeline2), 2) + self.bob.identity.follow(self.alice.identity) + Takahe._force_state_cycle() + self.assertEqual(len(bob_feed.get_timeline()), 2) + + # bob mute, then unmute alice + self.bob.identity.mute(self.alice.identity) + Takahe._force_state_cycle() + self.assertEqual(len(bob_feed.get_timeline()), 0) + self.bob.identity.unmute(self.alice.identity) + Takahe._force_state_cycle() + self.assertEqual(len(bob_feed.get_timeline()), 2) # alice:3 bob:2 after alice adding second book to shelf as private - self.alice.shelf_manager.move_item(self.movie, ShelfType.WISHLIST, visibility=2) - timeline = self.alice.activity_manager.get_timeline() - self.assertEqual(len(timeline), 3) - timeline2 = self.bob.activity_manager.get_timeline() - self.assertEqual(len(timeline2), 2) - - # remote unfollow - self.bob.mastodon_following = 
[] - self.alice.mastodon_follower = [] - self.bob.merge_relationships() - timeline = self.bob.activity_manager.get_timeline() - self.assertEqual(len(timeline), 0) - - # local follow - self.bob.follow(self.alice) - timeline = self.bob.activity_manager.get_timeline() - self.assertEqual(len(timeline), 2) + self.alice.identity.shelf_manager.move_item( + self.movie, ShelfType.WISHLIST, visibility=2 + ) + self.assertEqual(len(alice_feed.get_timeline()), 3) + self.assertEqual(len(bob_feed.get_timeline()), 2) + + # alice mute bob + self.alice.identity.mute(self.bob.identity) + Takahe._force_state_cycle() + self.assertEqual(len(bob_feed.get_timeline()), 2) + + # bob unfollow alice + self.bob.identity.unfollow(self.alice.identity) + Takahe._force_state_cycle() + self.assertEqual(len(bob_feed.get_timeline()), 0) + + # bob follow alice + self.bob.identity.follow(self.alice.identity) + Takahe._force_state_cycle() + self.assertEqual(len(bob_feed.get_timeline()), 2) + + # alice block bob + self.alice.identity.block(self.bob.identity) + Takahe._force_state_cycle() + self.assertEqual(len(bob_feed.get_timeline()), 0) diff --git a/social/views.py b/social/views.py index 9a68b775..22bfd505 100644 --- a/social/views.py +++ b/social/views.py @@ -1,9 +1,9 @@ import logging -from django.conf import settings -from django.contrib.auth.decorators import login_required, permission_required +from django.contrib.auth.decorators import login_required from django.core.exceptions import BadRequest -from django.shortcuts import render +from django.shortcuts import redirect, render +from django.urls import reverse from django.utils.translation import gettext_lazy as _ from catalog.models import * @@ -20,6 +20,8 @@ def feed(request): if request.method != "GET": raise BadRequest() + if not request.user.registration_complete: + return redirect(reverse("users:register")) user = request.user podcast_ids = [ p.item_id @@ -65,7 +67,7 @@ def data(request): request, "feed_data.html", { - "activities": ActivityManager(request.user).get_timeline( + "activities": ActivityManager(request.user.identity).get_timeline( before_time=request.GET.get("last") )[:PAGE_SIZE], }, diff --git a/takahe/__init__.py b/takahe/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/takahe/admin.py b/takahe/admin.py new file mode 100644 index 00000000..8c38f3f3 --- /dev/null +++ b/takahe/admin.py @@ -0,0 +1,3 @@ +from django.contrib import admin + +# Register your models here. 
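The rewritten social/tests.py above routes follows, mutes, and blocks through APIdentity and then calls Takahe._force_state_cycle() so that Takahe's stator state machine settles before the timeline is asserted. The test idiom, condensed (a sketch of the pattern used above, not new API):

```python
from takahe.utils import Takahe


def follow_and_fetch_timeline(follower, followee):
    # Relationship changes go through the Takahe-backed identity layer...
    follower.identity.follow(followee.identity)
    # ...and are processed asynchronously by design, so tests force the
    # pending state transitions to run before querying the timeline.
    Takahe._force_state_cycle()
    return follower.identity.activity_manager.get_timeline()
```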
diff --git a/takahe/ap_handlers.py b/takahe/ap_handlers.py new file mode 100644 index 00000000..3c9ce8cd --- /dev/null +++ b/takahe/ap_handlers.py @@ -0,0 +1,123 @@ +from datetime import datetime + +from loguru import logger + +from catalog.common import * +from journal.models import Comment, Piece, Rating, Review, ShelfMember +from users.models import User as NeoUser + +from .models import Follow, Identity, Post +from .utils import Takahe + +_supported_ap_catalog_item_types = [ + "Edition", + "Movie", + "TVShow", + "TVSeason", + "TVEpisode", + "Album", + "Game", + "Podcast", + "Performance", + "PerformanceProduction", +] + +_supported_ap_journal_types = { + "Status": ShelfMember, + "Rating": Rating, + "Comment": Comment, + "Review": Review, +} + + +def _parse_links(objects): + logger.debug(f"Parsing links from {objects}") + items = [] + pieces = [] + for obj in objects: + if obj["type"] in _supported_ap_catalog_item_types: + items.append(obj["url"]) + elif obj["type"] in _supported_ap_journal_types.keys(): + pieces.append(obj) + else: + logger.warning(f'Unknown link type {obj["type"]}') + return items, pieces + + +def _get_or_create_item_by_ap_url(url): + logger.debug(f"Fetching item by ap from {url}") + site = SiteManager.get_site_by_url(url) + if not site: + return None + site.get_resource_ready() + item = site.get_item() + return item + + +def _get_visibility(post_visibility): + match post_visibility: + case 2: + return 1 + case 3: + return 2 + case _: + return 0 + + +def _update_or_create_post(pk, obj): + post = Post.objects.get(pk=pk) + owner = Takahe.get_or_create_remote_apidentity(post.author) + if not post.type_data: + logger.warning(f"Post {post} has no type_data") + return + items, pieces = _parse_links(post.type_data["object"]["relatedWith"]) + logger.info(f"Post {post} has items {items} and pieces {pieces}") + if len(items) == 0: + logger.warning(f"Post {post} has no remote items") + return + elif len(items) > 1: + logger.warning(f"Post {post} has more than one remote item") + return + remote_url = items[0] + item = _get_or_create_item_by_ap_url(remote_url) + if not item: + logger.warning(f"Post {post} has no local item") + return + for p in pieces: + cls = _supported_ap_journal_types[p["type"]] + cls.update_by_ap_object(owner, item, p, pk, _get_visibility(post.visibility)) + + +def post_created(pk, obj): + _update_or_create_post(pk, obj) + + +def post_updated(pk, obj): + _update_or_create_post(pk, obj) + + +def post_deleted(pk, obj): + Piece.objects.filter(posts__id=pk, local=False).delete() + + +def user_follow_updated(source_identity_pk, target_identity_pk): + u = Takahe.get_local_user_by_identity(source_identity_pk) + # Takahe.update_user_following(u) + logger.info(f"User {u} following updated") + + +def user_mute_updated(source_identity_pk, target_identity_pk): + u = Takahe.get_local_user_by_identity(source_identity_pk) + # Takahe.update_user_muting(u) + logger.info(f"User {u} muting updated") + + +def user_block_updated(source_identity_pk, target_identity_pk): + u = Takahe.get_local_user_by_identity(source_identity_pk) + if u: + # Takahe.update_user_rejecting(u) + logger.info(f"User {u} rejecting updated") + u = Takahe.get_local_user_by_identity(target_identity_pk) + if u: + # Takahe.update_user_rejecting(u) + logger.info(f"User {u} rejecting updated") diff --git a/takahe/apps.py b/takahe/apps.py new file mode 100644 index 00000000..7d39fe99 --- /dev/null +++ b/takahe/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class TakaheConfig(AppConfig): + 
default_auto_field = "django.db.models.BigAutoField" + name = "takahe" diff --git a/takahe/db_routes.py b/takahe/db_routes.py new file mode 100644 index 00000000..09015105 --- /dev/null +++ b/takahe/db_routes.py @@ -0,0 +1,27 @@ +from django.conf import settings + +_is_testing = "testserver" in settings.ALLOWED_HOSTS + + +class TakaheRouter: + def db_for_read(self, model, **hints): + if model._meta.app_label == "takahe": + return "takahe" + return None + + def db_for_write(self, model, **hints): + if model._meta.app_label == "takahe": + return "takahe" + return None + + def allow_relation(self, obj1, obj2, **hints): + # skip this check, but please make sure + # not to create relations between takahe models and other apps + if obj1._meta.app_label == "takahe" or obj2._meta.app_label == "takahe": + return obj1._meta.app_label == obj2._meta.app_label + return None + + def allow_migrate(self, db, app_label, model_name=None, **hints): + if app_label == "takahe" or db == "takahe": + return _is_testing and app_label == db + return None diff --git a/takahe/html.py b/takahe/html.py new file mode 100644 index 00000000..c598be2f --- /dev/null +++ b/takahe/html.py @@ -0,0 +1,379 @@ +import html +import re +from html.parser import HTMLParser + +from django.utils.safestring import mark_safe + + +class FediverseHtmlParser(HTMLParser): + """ + A custom HTML parser that only allows a certain tag subset and behaviour: + - br, p tags are passed through + - a tags are passed through if they're not hashtags or mentions + - Another set of tags are converted to p + + It also linkifies URLs, mentions, hashtags, and imagifies emoji. + """ + + REWRITE_TO_P = [ + "p", + "h1", + "h2", + "h3", + "h4", + "h5", + "h6", + "blockquote", + "pre", + "ul", + "ol", + ] + + REWRITE_TO_BR = [ + "br", + "li", + ] + + MENTION_REGEX = re.compile( + r"(^|[^\w\d\-_/])@([\w\d\-_]+(?:@[\w\d\-_\.]+[\w\d\-_]+)?)" + ) + + HASHTAG_REGEX = re.compile(r"\B#([a-zA-Z0-9(_)]+\b)(?!;)") + + EMOJI_REGEX = re.compile(r"\B:([a-zA-Z0-9(_)-]+):\B") + + URL_REGEX = re.compile( + r"""(\(* # Match any opening parentheses. + \b(?<![@.])(?:(?:https?|ftp)://(?:(?:\w+:)?\w+@)?)? # http:// + ([\w-]+\.)+(?:[a-z]{2,13})(?:\:[0-9]+)?(?!\.\w)\b # xx.yy.tld(:##)? + (?:[/?][^\s\{\}\|\\\^\[\]`<>"]*)?)
+ # /path/zz (excluding "unsafe" chars from RFC 1738, + # except for # and ~, which happen in practice) + """, + re.IGNORECASE | re.VERBOSE | re.UNICODE, + ) + + def __init__( + self, + html: str, + uri_domain: str | None = None, + mentions: list | None = None, + find_mentions: bool = False, + find_hashtags: bool = False, + find_emojis: bool = False, + emoji_domain=None, + ): + super().__init__() + self.uri_domain = uri_domain + self.emoji_domain = emoji_domain + self.find_mentions = find_mentions + self.find_hashtags = find_hashtags + self.find_emojis = find_emojis + self.calculate_mentions(mentions) + self._data_buffer = "" + self.html_output = "" + self.text_output = "" + self.emojis: set[str] = set() + self.mentions: set[str] = set() + self.hashtags: set[str] = set() + self._pending_a: dict | None = None + self._fresh_p = False + self.feed(html.replace("\n", "")) + self.flush_data() + + def calculate_mentions(self, mentions: list | None): + """ + Prepares a set of content that we expect to see mentions look like + (this imp) + """ + self.mention_matches: dict[str, str] = {} + self.mention_aliases: dict[str, str] = {} + for mention in mentions or []: + if self.uri_domain: + url = mention.absolute_profile_uri() + else: + url = str(mention.urls.view) + if mention.username: + username = mention.username.lower() + domain = mention.domain_id.lower() + self.mention_matches[f"{username}"] = url + self.mention_matches[f"{username}@{domain}"] = url + self.mention_matches[mention.absolute_profile_uri()] = url + + def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None: + if tag in self.REWRITE_TO_P: + self.flush_data() + self.html_output += "
<p>" + elif tag in self.REWRITE_TO_BR: + self.flush_data() + if not self._fresh_p: + self.html_output += "<br>" + self.text_output += "\n" + elif tag == "a": + self.flush_data() + self._pending_a = {"attrs": dict(attrs), "content": ""} + self._fresh_p = tag in self.REWRITE_TO_P + + def handle_endtag(self, tag: str) -> None: + self._fresh_p = False + if tag in self.REWRITE_TO_P: + self.flush_data() + self.html_output += "</p>
" + self.text_output += "\n\n" + elif tag == "a": + if self._pending_a: + href = self._pending_a["attrs"].get("href") + content = self._pending_a["content"].strip() + has_ellipsis = "ellipsis" in self._pending_a["attrs"].get("class", "") + # Is it a mention? + if content.lower().lstrip("@") in self.mention_matches: + self.html_output += self.create_mention(content, href) + self.text_output += content + # Is it a hashtag? + elif self.HASHTAG_REGEX.match(content): + self.html_output += self.create_hashtag(content) + self.text_output += content + elif content: + # Shorten the link if we need to + self.html_output += self.create_link( + href, + content, + has_ellipsis=has_ellipsis, + ) + self.text_output += href + self._pending_a = None + + def handle_data(self, data: str) -> None: + self._fresh_p = False + if self._pending_a: + self._pending_a["content"] += data + else: + self._data_buffer += data + + def flush_data(self) -> None: + """ + We collect data segments until we encounter a tag we care about, + so we can treat #hashtag as #hashtag + """ + self.text_output += self._data_buffer + self.html_output += self.linkify(self._data_buffer) + self._data_buffer = "" + + def create_link(self, href, content, has_ellipsis=False): + """ + Generates a link, doing optional shortening. + + All return values from this function should be HTML-safe. + """ + looks_like_link = bool(self.URL_REGEX.match(content)) + if looks_like_link: + protocol, content = content.split("://", 1) + else: + protocol = "" + if (looks_like_link and len(content) > 30) or has_ellipsis: + return f'{html.escape(content[:30])}' + elif looks_like_link: + return f'{html.escape(content)}' + else: + return f'{html.escape(content)}' + + def create_mention(self, handle, href: str | None = None) -> str: + """ + Generates a mention link. Handle should have a leading @. + + All return values from this function should be HTML-safe + """ + handle = handle.lstrip("@") + if "@" in handle: + short_handle = handle.split("@", 1)[0] + else: + short_handle = handle + handle_hash = handle.lower() + short_hash = short_handle.lower() + self.mentions.add(handle_hash) + url = self.mention_matches.get(handle_hash) + # If we have a captured link out, use that as the actual resolver + if href and href in self.mention_matches: + url = self.mention_matches[href] + if url: + if short_hash not in self.mention_aliases: + self.mention_aliases[short_hash] = handle_hash + elif self.mention_aliases.get(short_hash) != handle_hash: + short_handle = handle + return f'@{html.escape(short_handle)}' + else: + return "@" + html.escape(handle) + + def create_hashtag(self, hashtag) -> str: + """ + Generates a hashtag link. Hashtag does not need to start with # + + All return values from this function should be HTML-safe + """ + hashtag = hashtag.lstrip("#") + self.hashtags.add(hashtag.lower()) + if self.uri_domain: + return f'' + else: + return f'' + + def create_emoji(self, shortcode) -> str: + """ + Generates an emoji tag + + All return values from this function should be HTML-safe + """ + from .models import Emoji + + emoji = Emoji.get_by_domain(shortcode, self.emoji_domain) + if emoji and emoji.is_usable: + self.emojis.add(shortcode) + return emoji.as_html() + return f":{shortcode}:" + + def linkify(self, data): + """ + Linkifies some content that is plaintext. + + Handles URLs first, then mentions. Note that this takes great care to + keep track of what is HTML and what needs to be escaped. 
+ """ + # Split the string by the URL regex so we know what to escape and what + # not to escape. + bits = self.URL_REGEX.split(data) + result = "" + # Even indices are data we should pass though, odd indices are links + for i, bit in enumerate(bits): + # A link! + if i % 2 == 1: + result += self.create_link(bit, bit) + # Not a link + elif self.mention_matches or self.find_mentions: + result += self.linkify_mentions(bit) + elif self.find_hashtags: + result += self.linkify_hashtags(bit) + elif self.find_emojis: + result += self.linkify_emoji(bit) + else: + result += html.escape(bit) + return result + + def linkify_mentions(self, data): + """ + Linkifies mentions + """ + bits = self.MENTION_REGEX.split(data) + result = "" + for i, bit in enumerate(bits): + # Mention content + if i % 3 == 2: + result += self.create_mention(bit) + # Not part of a mention (0) or mention preamble (1) + elif self.find_hashtags: + result += self.linkify_hashtags(bit) + elif self.find_emojis: + result += self.linkify_emoji(bit) + else: + result += html.escape(bit) + return result + + def linkify_hashtags(self, data): + """ + Linkifies hashtags + """ + bits = self.HASHTAG_REGEX.split(data) + result = "" + for i, bit in enumerate(bits): + # Not part of a hashtag + if i % 2 == 0: + if self.find_emojis: + result += self.linkify_emoji(bit) + else: + result += html.escape(bit) + # Hashtag content + else: + result += self.create_hashtag(bit) + return result + + def linkify_emoji(self, data): + """ + Linkifies emoji + """ + bits = self.EMOJI_REGEX.split(data) + result = "" + for i, bit in enumerate(bits): + # Not part of an emoji + if i % 2 == 0: + result += html.escape(bit) + # Emoji content + else: + result += self.create_emoji(bit) + return result + + @property + def html(self): + return self.html_output.strip() + + @property + def plain_text(self): + return self.text_output.strip() + + +class ContentRenderer: + """ + Renders HTML for posts, identity fields, and more. + + The `local` parameter affects whether links are absolute (False) or relative (True) + """ + + def __init__(self, local: bool): + self.local = local + + def render_post(self, html: str, post) -> str: + """ + Given post HTML, normalises it and renders it for presentation. + """ + if not html: + return "" + parser = FediverseHtmlParser( + html, + mentions=post.mentions.all(), + uri_domain=(None if self.local else post.author.domain.uri_domain), + find_hashtags=True, + find_emojis=self.local, + emoji_domain=post.author.domain, + ) + return mark_safe(parser.html) + + def render_identity_summary(self, html: str, identity) -> str: + """ + Given identity summary HTML, normalises it and renders it for presentation. + """ + if not html: + return "" + parser = FediverseHtmlParser( + html, + uri_domain=(None if self.local else identity.domain.uri_domain), + find_hashtags=True, + find_emojis=self.local, + emoji_domain=identity.domain, + ) + return mark_safe(parser.html) + + def render_identity_data(self, html: str, identity, strip: bool = False) -> str: + """ + Given name/basic value HTML, normalises it and renders it for presentation. 
+ """ + if not html: + return "" + parser = FediverseHtmlParser( + html, + uri_domain=(None if self.local else identity.domain.uri_domain), + find_hashtags=False, + find_emojis=self.local, + emoji_domain=identity.domain, + ) + if strip: + return mark_safe(parser.html) + else: + return mark_safe(parser.html) diff --git a/takahe/management/commands/takahe.py b/takahe/management/commands/takahe.py new file mode 100644 index 00000000..fc555a28 --- /dev/null +++ b/takahe/management/commands/takahe.py @@ -0,0 +1,42 @@ +from django.conf import settings +from django.core.management.base import BaseCommand +from django.db.models import Count, F +from loguru import logger +from tqdm import tqdm + +from catalog.common import * +from catalog.common.models import * +from catalog.models import * +from journal.models import Tag, update_journal_for_merged_item +from takahe.utils import * +from users.models import User as NeoUser + + +class Command(BaseCommand): + def add_arguments(self, parser): + parser.add_argument( + "--verbose", + action="store_true", + ) + parser.add_argument( + "--sync", + action="store_true", + ) + + def sync(self): + logger.info(f"Syncing domain...") + Takahe.get_domain() + logger.info(f"Syncing users...") + for u in tqdm(NeoUser.objects.filter(is_active=True, username__isnull=False)): + Takahe.init_identity_for_local_user(u) + # Takahe.update_user_following(u) + # Takahe.update_user_muting(u) + # Takahe.update_user_rejecting(u) + + def handle(self, *args, **options): + self.verbose = options["verbose"] + + if options["sync"]: + self.sync() + + self.stdout.write(self.style.SUCCESS(f"Done.")) diff --git a/takahe/migrations/0001_initial.py b/takahe/migrations/0001_initial.py new file mode 100644 index 00000000..842280d6 --- /dev/null +++ b/takahe/migrations/0001_initial.py @@ -0,0 +1,628 @@ +# Generated by Django 4.2.4 on 2023-08-12 16:48 + +import functools + +import django.db.models.deletion +import django.utils.timezone +from django.db import migrations, models + +import takahe.models + + +class Migration(migrations.Migration): + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="Domain", + fields=[ + ( + "domain", + models.CharField(max_length=250, primary_key=True, serialize=False), + ), + ( + "service_domain", + models.CharField( + blank=True, + db_index=True, + max_length=250, + null=True, + unique=True, + ), + ), + ("state", models.CharField(default="outdated", max_length=100)), + ("state_changed", models.DateTimeField(auto_now_add=True)), + ("nodeinfo", models.JSONField(blank=True, null=True)), + ("local", models.BooleanField()), + ("blocked", models.BooleanField(default=False)), + ("public", models.BooleanField(default=False)), + ("default", models.BooleanField(default=False)), + ("notes", models.TextField(blank=True, null=True)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ], + options={ + "db_table": "users_domain", + }, + ), + migrations.CreateModel( + name="Emoji", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("shortcode", models.SlugField(max_length=100)), + ("local", models.BooleanField(default=True)), + ("public", models.BooleanField(null=True)), + ( + "object_uri", + models.CharField( + blank=True, max_length=500, null=True, unique=True + ), + ), + ("mimetype", models.CharField(max_length=200)), + ("file", models.ImageField(blank=True, null=True, upload_to="")), + 
("remote_url", models.CharField(blank=True, max_length=500, null=True)), + ("category", models.CharField(blank=True, max_length=100, null=True)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ( + "domain", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="takahe.domain", + ), + ), + ], + options={ + "db_table": "activities_emoji", + }, + ), + migrations.CreateModel( + name="Hashtag", + fields=[ + ( + "hashtag", + models.SlugField(max_length=100, primary_key=True, serialize=False), + ), + ( + "name_override", + models.CharField(blank=True, max_length=100, null=True), + ), + ("public", models.BooleanField(null=True)), + ("state", models.CharField(default="outdated", max_length=100)), + ("state_changed", models.DateTimeField(auto_now_add=True)), + ("stats", models.JSONField(blank=True, null=True)), + ("stats_updated", models.DateTimeField(blank=True, null=True)), + ("aliases", models.JSONField(blank=True, null=True)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ], + options={ + "db_table": "activities_hashtag", + }, + ), + migrations.CreateModel( + name="Identity", + fields=[ + ( + "id", + models.BigIntegerField( + default=takahe.models.Snowflake.generate_identity, + primary_key=True, + serialize=False, + ), + ), + ("actor_uri", models.CharField(max_length=500, unique=True)), + ("state", models.CharField(default="outdated", max_length=100)), + ("state_changed", models.DateTimeField(auto_now_add=True)), + ("local", models.BooleanField(db_index=True)), + ("username", models.CharField(blank=True, max_length=500, null=True)), + ( + "name", + models.CharField( + blank=True, max_length=500, null=True, verbose_name="昵称" + ), + ), + ("summary", models.TextField(blank=True, null=True, verbose_name="简介")), + ( + "manually_approves_followers", + models.BooleanField(default=False, verbose_name="手工审核关注者"), + ), + ( + "discoverable", + models.BooleanField(default=True, verbose_name="允许被发现或推荐"), + ), + ( + "profile_uri", + models.CharField(blank=True, max_length=500, null=True), + ), + ("inbox_uri", models.CharField(blank=True, max_length=500, null=True)), + ( + "shared_inbox_uri", + models.CharField(blank=True, max_length=500, null=True), + ), + ("outbox_uri", models.CharField(blank=True, max_length=500, null=True)), + ("icon_uri", models.CharField(blank=True, max_length=500, null=True)), + ("image_uri", models.CharField(blank=True, max_length=500, null=True)), + ( + "followers_uri", + models.CharField(blank=True, max_length=500, null=True), + ), + ( + "following_uri", + models.CharField(blank=True, max_length=500, null=True), + ), + ( + "featured_collection_uri", + models.CharField(blank=True, max_length=500, null=True), + ), + ("actor_type", models.CharField(default="person", max_length=100)), + ("metadata", models.JSONField(blank=True, null=True)), + ("pinned", models.JSONField(blank=True, null=True)), + ("sensitive", models.BooleanField(default=False)), + ( + "restriction", + models.IntegerField( + choices=[(0, "None"), (1, "Limited"), (2, "Blocked")], + db_index=True, + default=0, + ), + ), + ("admin_notes", models.TextField(blank=True, null=True)), + ("private_key", models.TextField(blank=True, null=True)), + ("public_key", models.TextField(blank=True, null=True)), + ("public_key_id", models.TextField(blank=True, null=True)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", 
models.DateTimeField(auto_now=True)), + ("fetched", models.DateTimeField(blank=True, null=True)), + ("deleted", models.DateTimeField(blank=True, null=True)), + ( + "domain", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.PROTECT, + related_name="identities", + to="takahe.domain", + ), + ), + ], + options={ + "verbose_name_plural": "identities", + "db_table": "users_identity", + }, + ), + migrations.AddField( + model_name="identity", + name="icon", + field=models.ImageField( + blank=True, + null=True, + storage=takahe.models.upload_store, + upload_to=functools.partial( + takahe.models.upload_namer, *("profile_images",), **{} + ), + verbose_name="头像", + ), + ), + migrations.AddField( + model_name="identity", + name="image", + field=models.ImageField( + blank=True, + null=True, + storage=takahe.models.upload_store, + upload_to=functools.partial( + takahe.models.upload_namer, *("background_images",), **{} + ), + ), + ), + migrations.AddField( + model_name="identity", + name="state_locked_until", + field=models.DateTimeField(blank=True, db_index=True, null=True), + ), + migrations.AddField( + model_name="identity", + name="state_next_attempt", + field=models.DateTimeField(blank=True, null=True), + ), + migrations.CreateModel( + name="Post", + fields=[ + ( + "id", + models.BigIntegerField( + default=takahe.models.Snowflake.generate_post, + primary_key=True, + serialize=False, + ), + ), + ("state", models.CharField(default="new", max_length=100)), + ("state_changed", models.DateTimeField(auto_now_add=True)), + ("local", models.BooleanField()), + ( + "object_uri", + models.CharField( + blank=True, max_length=2048, null=True, unique=True + ), + ), + ( + "visibility", + models.IntegerField( + choices=[ + (0, "Public"), + (4, "Local Only"), + (1, "Unlisted"), + (2, "Followers"), + (3, "Mentioned"), + ], + default=0, + ), + ), + ("content", models.TextField()), + ( + "type", + models.CharField( + choices=[ + ("Article", "Article"), + ("Audio", "Audio"), + ("Event", "Event"), + ("Image", "Image"), + ("Note", "Note"), + ("Page", "Page"), + ("Question", "Question"), + ("Video", "Video"), + ], + default="Note", + max_length=20, + ), + ), + ("type_data", models.JSONField(blank=True, null=True)), + ("sensitive", models.BooleanField(default=False)), + ("summary", models.TextField(blank=True, null=True)), + ("url", models.CharField(blank=True, max_length=2048, null=True)), + ( + "in_reply_to", + models.CharField( + blank=True, db_index=True, max_length=500, null=True + ), + ), + ("hashtags", models.JSONField(blank=True, null=True)), + ("stats", models.JSONField(blank=True, null=True)), + ("published", models.DateTimeField(default=django.utils.timezone.now)), + ("edited", models.DateTimeField(blank=True, null=True)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ( + "author", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="posts", + to="takahe.identity", + ), + ), + ( + "emojis", + models.ManyToManyField( + blank=True, related_name="posts_using_emoji", to="takahe.emoji" + ), + ), + ( + "mentions", + models.ManyToManyField( + blank=True, + related_name="posts_mentioning", + to="takahe.identity", + ), + ), + ( + "to", + models.ManyToManyField( + blank=True, related_name="posts_to", to="takahe.identity" + ), + ), + ], + options={ + "db_table": "activities_post", + }, + ), + migrations.CreateModel( + name="User", + fields=[ + ( + "id", + models.BigAutoField( + 
auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("password", models.CharField(max_length=128, verbose_name="password")), + ( + "last_login", + models.DateTimeField( + blank=True, null=True, verbose_name="last login" + ), + ), + ("email", models.EmailField(max_length=254, unique=True)), + ("admin", models.BooleanField(default=False)), + ("moderator", models.BooleanField(default=False)), + ("banned", models.BooleanField(default=False)), + ("deleted", models.BooleanField(default=False)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ("last_seen", models.DateTimeField(auto_now_add=True)), + ], + options={ + "db_table": "users_user", + }, + ), + migrations.CreateModel( + name="PostInteraction", + fields=[ + ( + "id", + models.BigIntegerField( + default=takahe.models.Snowflake.generate_post_interaction, + primary_key=True, + serialize=False, + ), + ), + ("state", models.CharField(default="new", max_length=100)), + ("state_changed", models.DateTimeField(auto_now_add=True)), + ( + "object_uri", + models.CharField( + blank=True, max_length=500, null=True, unique=True + ), + ), + ( + "type", + models.CharField( + choices=[ + ("like", "Like"), + ("boost", "Boost"), + ("vote", "Vote"), + ("pin", "Pin"), + ], + max_length=100, + ), + ), + ("value", models.CharField(blank=True, max_length=50, null=True)), + ("published", models.DateTimeField(default=django.utils.timezone.now)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ( + "identity", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="interactions", + to="takahe.identity", + ), + ), + ( + "post", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="interactions", + to="takahe.post", + ), + ), + ], + options={ + "db_table": "activities_postinteraction", + }, + ), + migrations.AddField( + model_name="identity", + name="users", + field=models.ManyToManyField( + blank=True, related_name="identities", to="takahe.user" + ), + ), + migrations.AddField( + model_name="domain", + name="users", + field=models.ManyToManyField( + blank=True, related_name="domains", to="takahe.user" + ), + ), + migrations.CreateModel( + name="Block", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("state", models.CharField(default="new", max_length=100)), + ("state_changed", models.DateTimeField(auto_now_add=True)), + ("uri", models.CharField(blank=True, max_length=500, null=True)), + ("mute", models.BooleanField()), + ("include_notifications", models.BooleanField(default=False)), + ("expires", models.DateTimeField(blank=True, null=True)), + ("note", models.TextField(blank=True, null=True)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ( + "source", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="outbound_blocks", + to="takahe.identity", + ), + ), + ( + "target", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="inbound_blocks", + to="takahe.identity", + ), + ), + ], + options={ + "db_table": "users_block", + }, + ), + migrations.AlterUniqueTogether( + name="identity", + unique_together={("username", "domain")}, + ), + migrations.CreateModel( + name="Follow", + fields=[ + ( + "id", + models.BigIntegerField( + 
default=takahe.models.Snowflake.generate_follow, + primary_key=True, + serialize=False, + ), + ), + ( + "boosts", + models.BooleanField( + default=True, help_text="Also follow boosts from this user" + ), + ), + ("uri", models.CharField(blank=True, max_length=500, null=True)), + ("note", models.TextField(blank=True, null=True)), + ("state", models.CharField(default="unrequested", max_length=100)), + ("state_changed", models.DateTimeField(auto_now_add=True)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ( + "source", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="outbound_follows", + to="takahe.identity", + ), + ), + ( + "target", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="inbound_follows", + to="takahe.identity", + ), + ), + ], + options={ + "db_table": "users_follow", + "unique_together": {("source", "target")}, + }, + ), + migrations.CreateModel( + name="InboxMessage", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("message", models.JSONField()), + ("state", models.CharField(default="received", max_length=100)), + ("state_changed", models.DateTimeField(auto_now_add=True)), + ], + options={ + "db_table": "users_inboxmessage", + }, + ), + migrations.CreateModel( + name="Config", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("key", models.CharField(max_length=500)), + ("json", models.JSONField(blank=True, null=True)), + ("image", models.ImageField(blank=True, null=True, upload_to="")), + ( + "domain", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="configs", + to="takahe.domain", + ), + ), + ( + "identity", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="configs", + to="takahe.identity", + ), + ), + ( + "user", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="configs", + to="takahe.user", + ), + ), + ], + options={ + "db_table": "core_config", + "unique_together": {("key", "user", "identity", "domain")}, + }, + ), + migrations.CreateModel( + name="Invite", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("token", models.CharField(max_length=500, unique=True)), + ("note", models.TextField(blank=True, null=True)), + ("uses", models.IntegerField(blank=True, null=True)), + ("expires", models.DateTimeField(blank=True, null=True)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ], + options={ + "db_table": "users_invite", + }, + ), + ] diff --git a/takahe/migrations/__init__.py b/takahe/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/takahe/models.py b/takahe/models.py new file mode 100644 index 00000000..635c0331 --- /dev/null +++ b/takahe/models.py @@ -0,0 +1,1646 @@ +import datetime +import os +import random +import re +import secrets +import ssl +import time +from datetime import date +from functools import cached_property, partial +from typing import TYPE_CHECKING, Literal, Optional +from urllib.parse import urlparse + +import httpx +import urlman +from cachetools import TTLCache, cached +from 
cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import padding, rsa +from django.conf import settings +from django.contrib.auth.models import AbstractBaseUser, BaseUserManager +from django.core.files.storage import FileSystemStorage +from django.db import models, transaction +from django.template.defaultfilters import linebreaks_filter +from django.utils import timezone +from django.utils.safestring import mark_safe +from django.utils.translation import gettext_lazy as _ +from loguru import logger +from lxml import etree + +from .html import ContentRenderer, FediverseHtmlParser +from .uris import * + +if TYPE_CHECKING: + from django.db.models.manager import RelatedManager + + +class Snowflake: + """ + Snowflake ID generator and parser. + """ + + # Epoch is 2022/1/1 at midnight, as these are used for _created_ times in our + # own database, not original publish times (which would need an earlier one) + EPOCH = 1641020400 + + TYPE_POST = 0b000 + TYPE_POST_INTERACTION = 0b001 + TYPE_IDENTITY = 0b010 + TYPE_REPORT = 0b011 + TYPE_FOLLOW = 0b100 + + @classmethod + def generate(cls, type_id: int) -> int: + """ + Generates a snowflake-style ID for the given "type". They are designed + to fit inside 63 bits (a signed bigint) + + ID layout is: + * 41 bits of millisecond-level timestamp (enough for EPOCH + 69 years) + * 19 bits of random data (1% chance of clash at 10000 per millisecond) + * 3 bits of type information + + We use random data rather than a sequence ID to try and avoid pushing + this job onto the DB - we may do that in future. If a clash does + occur, the insert will fail and Stator will retry the work for anything + that's coming in remotely, leaving us to just handle that scenario for + our own posts, likes, etc. 
+ """ + # Get the current time in milliseconds + now: int = int((time.time() - cls.EPOCH) * 1000) + # Generate random data + rand_seq: int = secrets.randbits(19) + # Compose them together + return (now << 22) | (rand_seq << 3) | type_id + + @classmethod + def get_type(cls, snowflake: int) -> int: + """ + Returns the type of a given snowflake ID + """ + if snowflake < (1 << 22): + raise ValueError("Not a valid Snowflake ID") + return snowflake & 0b111 + + @classmethod + def get_time(cls, snowflake: int) -> float: + """ + Returns the generation time (in UNIX timestamp seconds) of the ID + """ + if snowflake < (1 << 22): + raise ValueError("Not a valid Snowflake ID") + return ((snowflake >> 22) / 1000) + cls.EPOCH + + # Handy pre-baked methods for django model defaults + @classmethod + def generate_post(cls) -> int: + return cls.generate(cls.TYPE_POST) + + @classmethod + def generate_post_interaction(cls) -> int: + return cls.generate(cls.TYPE_POST_INTERACTION) + + @classmethod + def generate_identity(cls) -> int: + return cls.generate(cls.TYPE_IDENTITY) + + @classmethod + def generate_report(cls) -> int: + return cls.generate(cls.TYPE_REPORT) + + @classmethod + def generate_follow(cls) -> int: + return cls.generate(cls.TYPE_FOLLOW) + + +class RsaKeys: + @classmethod + def generate_keypair(cls) -> tuple[str, str]: + """ + Generates a new RSA keypair + """ + private_key = rsa.generate_private_key( + public_exponent=65537, + key_size=2048, + ) + private_key_serialized = private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption(), + ).decode("ascii") + public_key_serialized = ( + private_key.public_key() + .public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + .decode("ascii") + ) + return private_key_serialized, public_key_serialized + + +class Invite(models.Model): + """ + An invite token, good for one signup. 
+ """ + + class Meta: + # managed = False + db_table = "users_invite" + + # Should always be lowercase + token = models.CharField(max_length=500, unique=True) + + # Admin note about this code + note = models.TextField(null=True, blank=True) + + # Uses remaining (null means "infinite") + uses = models.IntegerField(null=True, blank=True) + + # Expiry date + expires = models.DateTimeField(null=True, blank=True) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + + @classmethod + def create_random(cls, uses=None, expires=None, note=None): + return cls.objects.create( + token="".join( + random.choice("abcdefghkmnpqrstuvwxyz23456789") for i in range(20) + ), + uses=uses, + expires=expires, + note=note, + ) + + @property + def valid(self): + if self.uses is not None: + if self.uses <= 0: + return False + if self.expires is not None: + return self.expires >= timezone.now() + return True + + +class User(AbstractBaseUser): + identities: "RelatedManager[Identity]" + + class Meta: + # managed = False + db_table = "users_user" + + email = models.EmailField(unique=True) + admin = models.BooleanField(default=False) + moderator = models.BooleanField(default=False) + banned = models.BooleanField(default=False) + deleted = models.BooleanField(default=False) + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + last_seen = models.DateTimeField(auto_now_add=True) + USERNAME_FIELD = "email" + EMAIL_FIELD = "email" + REQUIRED_FIELDS: list[str] = [] + + @property + def is_active(self): + return not (self.deleted or self.banned) + + @property + def is_superuser(self): + return self.admin + + @property + def is_staff(self): + return self.admin + + def has_module_perms(self, module): + return self.admin + + def has_perm(self, perm): + return self.admin + + # @cached_property + # def config_user(self) -> Config.UserOptions: + # return Config.load_user(self) + + +class Domain(models.Model): + """ + Represents a domain that a user can have an account on. + + For protocol reasons, if we want to allow custom usernames + per domain, each "display" domain (the one in the handle) must either let + us serve on it directly, or have a "service" domain that maps + to it uniquely that we can serve on that. + + That way, someone coming in with just an Actor URI as their + entrypoint can still try to webfinger preferredUsername@actorDomain + and we can return an appropriate response. + + It's possible to just have one domain do both jobs, of course. + This model also represents _other_ servers' domains, which we treat as + display domains for now, until we start doing better probing. 
+ """ + + domain = models.CharField(max_length=250, primary_key=True) + service_domain = models.CharField( + max_length=250, + null=True, + blank=True, + db_index=True, + unique=True, + ) + + # state = StateField(DomainStates) + state = models.CharField(max_length=100, default="outdated") + state_changed = models.DateTimeField(auto_now_add=True) + + # nodeinfo 2.0 detail about the remote server + nodeinfo = models.JSONField(null=True, blank=True) + + # If we own this domain + local = models.BooleanField() + + # If we have blocked this domain from interacting with us + blocked = models.BooleanField(default=False) + + # Domains can be joinable by any user of the instance (as the default one + # should) + public = models.BooleanField(default=False) + + # If this is the default domain (shown as the default entry for new users) + default = models.BooleanField(default=False) + + # Domains can also be linked to one or more users for their private use + # This should be display domains ONLY + users = models.ManyToManyField("takahe.User", related_name="domains", blank=True) + + # Free-form notes field for admins + notes = models.TextField(blank=True, null=True) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + + class urls(urlman.Urls): + root = "/admin/domains/" + create = "/admin/domains/create/" + edit = "/admin/domains/{self.domain}/" + delete = "{edit}delete/" + root_federation = "/admin/federation/" + edit_federation = "/admin/federation/{self.domain}/" + + class Meta: + # managed = False + db_table = "users_domain" + indexes: list = [] + + @classmethod + def get_remote_domain(cls, domain: str) -> "Domain": + return cls.objects.get_or_create(domain=domain.lower(), local=False)[0] + + @classmethod + def get_domain(cls, domain: str) -> Optional["Domain"]: + try: + return cls.objects.get( + models.Q(domain=domain.lower()) + | models.Q(service_domain=domain.lower()) + ) + except cls.DoesNotExist: + return None + + @property + def uri_domain(self) -> str: + if self.service_domain: + return self.service_domain + return self.domain + + @classmethod + def available_for_user(cls, user): + """ + Returns domains that are available for the user to put an identity on + """ + return cls.objects.filter( + models.Q(public=True) | models.Q(users__id=user.id), + local=True, + ).order_by("-default", "domain") + + def __str__(self): + return self.domain + + +def upload_store(): + return FileSystemStorage( + location=settings.TAKAHE_MEDIA_ROOT, base_url=settings.TAKAHE_MEDIA_URL + ) + + +def upload_namer(prefix, instance, filename): + """ + Names uploaded images. + + By default, obscures the original name with a random UUID. + """ + _, old_extension = os.path.splitext(filename) + new_filename = secrets.token_urlsafe(20) + now = timezone.now() + return f"{prefix}/{now.year}/{now.month}/{now.day}/{new_filename}{old_extension}" + + +class Identity(models.Model): + """ + Represents both local and remote Fediverse identities (actors) + """ + + domain_id: int + + class Restriction(models.IntegerChoices): + none = 0 + limited = 1 + blocked = 2 + + ACTOR_TYPES = ["person", "service", "application", "group", "organization"] + + id = models.BigIntegerField(primary_key=True, default=Snowflake.generate_identity) + + # The Actor URI is essentially also a PK - we keep the default numeric + # one around as well for making nice URLs etc. 
+ actor_uri = models.CharField(max_length=500, unique=True) + + # state = StateField(IdentityStates) + state = models.CharField(max_length=100, default="outdated") + state_changed = models.DateTimeField(auto_now_add=True) + state_next_attempt = models.DateTimeField(blank=True, null=True) + state_locked_until = models.DateTimeField(null=True, blank=True, db_index=True) + + local = models.BooleanField(db_index=True) + users = models.ManyToManyField( + "takahe.User", + related_name="identities", + blank=True, + ) + + username = models.CharField(max_length=500, blank=True, null=True) + # Must be a display domain if present + domain = models.ForeignKey( + Domain, + blank=True, + null=True, + on_delete=models.PROTECT, + related_name="identities", + ) + + name = models.CharField(max_length=500, blank=True, null=True, verbose_name=_("昵称")) + summary = models.TextField(blank=True, null=True, verbose_name=_("简介")) + manually_approves_followers = models.BooleanField( + default=False, verbose_name=_("手工审核关注者") + ) + discoverable = models.BooleanField(default=True, verbose_name=_("允许被发现或推荐")) + + profile_uri = models.CharField(max_length=500, blank=True, null=True) + inbox_uri = models.CharField(max_length=500, blank=True, null=True) + shared_inbox_uri = models.CharField(max_length=500, blank=True, null=True) + outbox_uri = models.CharField(max_length=500, blank=True, null=True) + icon_uri = models.CharField(max_length=500, blank=True, null=True) + image_uri = models.CharField(max_length=500, blank=True, null=True) + followers_uri = models.CharField(max_length=500, blank=True, null=True) + following_uri = models.CharField(max_length=500, blank=True, null=True) + featured_collection_uri = models.CharField(max_length=500, blank=True, null=True) + actor_type = models.CharField(max_length=100, default="person") + + icon = models.ImageField( + upload_to=partial(upload_namer, "profile_images"), + blank=True, + null=True, + verbose_name=_("头像"), + storage=upload_store, + ) + image = models.ImageField( + upload_to=partial(upload_namer, "background_images"), + blank=True, + null=True, + storage=upload_store, + ) + + # Should be a list of {"name":..., "value":...} dicts + metadata = models.JSONField(blank=True, null=True) + + # Should be a list of object URIs (we don't want a full M2M here) + pinned = models.JSONField(blank=True, null=True) + + # Admin-only moderation fields + sensitive = models.BooleanField(default=False) + restriction = models.IntegerField( + choices=Restriction.choices, default=Restriction.none, db_index=True + ) + admin_notes = models.TextField(null=True, blank=True) + + private_key = models.TextField(null=True, blank=True) + public_key = models.TextField(null=True, blank=True) + public_key_id = models.TextField(null=True, blank=True) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + fetched = models.DateTimeField(null=True, blank=True) + deleted = models.DateTimeField(null=True, blank=True) + + # objects = IdentityManager() + + ### Model attributes ### + + class Meta: + # managed = False + db_table = "users_identity" + verbose_name_plural = "identities" + unique_together = [("username", "domain")] + indexes: list = [] # We need this so Stator can add its own + + class urls(urlman.Urls): + view = "/@{self.username}@{self.domain_id}/" + replies = "{view}replies/" + settings = "{view}settings/" + action = "{view}action/" + followers = "{view}followers/" + following = "{view}following/" + search = "{view}search/" + activate = 
"{view}activate/" + admin = "/admin/identities/" + admin_edit = "{admin}{self.pk}/" + djadmin_edit = "/djadmin/users/identity/{self.id}/change/" + + def get_scheme(self, url): + return "https" + + def get_hostname(self, url): + return self.instance.domain.uri_domain + + def __str__(self): + if self.username and self.domain: + return self.handle + return self.actor_uri + + def absolute_profile_uri(self): + """ + Returns a profile URI that is always absolute, for sending out to + other servers. + """ + if self.local: + return f"https://{self.domain.uri_domain}/@{self.username}/" + else: + return self.profile_uri + + @property + def handle(self): + if self.username is None: + return "(unknown user)" + if self.domain_id: + return f"{self.username}@{self.domain_id}" + return f"{self.username}@(unknown server)" + + @property + def url(self): + return ( + f"/users/{self.username}/" + if self.local + else f"/users/@{self.username}@{self.domain_id}/" + ) + + @property + def user_pk(self): + user = self.users.first() + return user.pk if user else None + + @classmethod + def fetch_webfinger_url(cls, domain: str) -> str: + """ + Given a domain (hostname), returns the correct webfinger URL to use + based on probing host-meta. + """ + with httpx.Client( + timeout=settings.SETUP.REMOTE_TIMEOUT, + headers={"User-Agent": settings.TAKAHE_USER_AGENT}, + ) as client: + try: + response = client.get( + f"https://{domain}/.well-known/host-meta", + follow_redirects=True, + headers={"Accept": "application/xml"}, + ) + + # In the case of anything other than a success, we'll still try + # hitting the webfinger URL on the domain we were given to handle + # incorrectly setup servers. + if response.status_code == 200 and response.content.strip(): + tree = etree.fromstring(response.content) + template = tree.xpath( + "string(.//*[local-name() = 'Link' and @rel='lrdd' and (not(@type) or @type='application/jrd+json')]/@template)" + ) + if template: + return template # type: ignore + except (httpx.RequestError, etree.ParseError): + pass + + return f"https://{domain}/.well-known/webfinger?resource={{uri}}" + + @classmethod + def fetch_webfinger(cls, handle: str) -> tuple[str | None, str | None]: + """ + Given a username@domain handle, returns a tuple of + (actor uri, canonical handle) or None, None if it does not resolve. + """ + domain = handle.split("@")[1].lower() + try: + webfinger_url = cls.fetch_webfinger_url(domain) + except ssl.SSLCertVerificationError: + return None, None + + # Go make a Webfinger request + with httpx.Client( + timeout=settings.SETUP.REMOTE_TIMEOUT, + headers={"User-Agent": settings.TAKAHE_USER_AGENT}, + ) as client: + try: + response = client.get( + webfinger_url.format(uri=f"acct:{handle}"), + follow_redirects=True, + headers={"Accept": "application/json"}, + ) + response.raise_for_status() + except (httpx.HTTPError, ssl.SSLCertVerificationError) as ex: + response = getattr(ex, "response", None) + if ( + response + and response.status_code < 500 + and response.status_code not in [400, 401, 403, 404, 406, 410] + ): + raise ValueError( + f"Client error fetching webfinger: {response.status_code}", + response.content, + ) + return None, None + + try: + data = response.json() + except ValueError: + # Some servers return these with a 200 status code! 
+ if b"not found" in response.content.lower(): + return None, None + raise ValueError( + "JSON parse error fetching webfinger", + response.content, + ) + try: + if data["subject"].startswith("acct:"): + data["subject"] = data["subject"][5:] + for link in data["links"]: + if ( + link.get("type") == "application/activity+json" + and link.get("rel") == "self" + ): + return link["href"], data["subject"] + except KeyError: + # Server returning wrong payload structure + pass + return None, None + + @classmethod + def by_username_and_domain( + cls, + username: str, + domain: str | Domain, + fetch: bool = False, + local: bool = False, + ): + """ + Get an Identity by username and domain. + + When fetch is True, a failed lookup will do a webfinger lookup to attempt to do + a lookup by actor_uri, creating an Identity record if one does not exist. When + local is True, lookups will be restricted to local domains. + + If domain is a Domain, domain.local is used instead of passsed local. + + """ + if username.startswith("@"): + raise ValueError("Username must not start with @") + + domain_instance = None + + if isinstance(domain, Domain): + domain_instance = domain + local = domain.local + domain = domain.domain + else: + domain = domain.lower() + try: + if local: + return cls.objects.get( + username__iexact=username, + domain_id=domain, + local=True, + ) + else: + return cls.objects.get( + username__iexact=username, + domain_id=domain, + ) + except cls.DoesNotExist: + if fetch and not local: + actor_uri, handle = cls.fetch_webfinger(f"{username}@{domain}") + if handle is None: + return None + # See if this actually does match an existing actor + try: + return cls.objects.get(actor_uri=actor_uri) + except cls.DoesNotExist: + pass + # OK, make one + username, domain = handle.split("@") + if not domain_instance: + domain_instance = Domain.get_remote_domain(domain) + return cls.objects.create( + actor_uri=actor_uri, + username=username, + domain_id=domain_instance, + local=False, + ) + return None + + def generate_keypair(self): + if not self.local: + raise ValueError("Cannot generate keypair for remote user") + self.private_key, self.public_key = RsaKeys.generate_keypair() + self.public_key_id = self.actor_uri + "#main-key" + self.save() + + +class Follow(models.Model): + """ + When one user (the source) follows other (the target) + """ + + id = models.BigIntegerField(primary_key=True, default=Snowflake.generate_follow) + + source = models.ForeignKey( + "takahe.Identity", + on_delete=models.CASCADE, + related_name="outbound_follows", + ) + target = models.ForeignKey( + "takahe.Identity", + on_delete=models.CASCADE, + related_name="inbound_follows", + ) + + boosts = models.BooleanField( + default=True, help_text="Also follow boosts from this user" + ) + + uri = models.CharField(blank=True, null=True, max_length=500) + note = models.TextField(blank=True, null=True) + + # state = StateField(FollowStates) + state = models.CharField(max_length=100, default="unrequested") + state_changed = models.DateTimeField(auto_now_add=True) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + + class Meta: + # managed = False + db_table = "users_follow" + unique_together = [("source", "target")] + indexes: list = [] # We need this so Stator can add its own + + def __str__(self): + return f"#{self.id}: {self.source} → {self.target}" + + +class PostQuerySet(models.QuerySet): + def not_hidden(self): + query = self.exclude(state__in=["deleted", "deleted_fanned_out"]) + return 
query + + def public(self, include_replies: bool = False): + query = self.filter( + visibility__in=[ + Post.Visibilities.public, + Post.Visibilities.local_only, + ], + ) + if not include_replies: + return query.filter(in_reply_to__isnull=True) + return query + + def local_public(self, include_replies: bool = False): + query = self.filter( + visibility__in=[ + Post.Visibilities.public, + Post.Visibilities.local_only, + ], + local=True, + ) + if not include_replies: + return query.filter(in_reply_to__isnull=True) + return query + + def unlisted(self, include_replies: bool = False): + query = self.filter( + visibility__in=[ + Post.Visibilities.public, + Post.Visibilities.local_only, + Post.Visibilities.unlisted, + ], + ) + if not include_replies: + return query.filter(in_reply_to__isnull=True) + return query + + def visible_to(self, identity: Identity | None, include_replies: bool = False): + if identity is None: + return self.unlisted(include_replies=include_replies) + query = self.filter( + models.Q( + visibility__in=[ + Post.Visibilities.public, + Post.Visibilities.local_only, + Post.Visibilities.unlisted, + ] + ) + | models.Q( + visibility=Post.Visibilities.followers, + author__inbound_follows__source=identity, + ) + | models.Q( + mentions=identity, + ) + | models.Q(author=identity) + ).distinct() + if not include_replies: + return query.filter(in_reply_to__isnull=True) + return query + + # def tagged_with(self, hashtag: str | Hashtag): + # if isinstance(hashtag, str): + # tag_q = models.Q(hashtags__contains=hashtag) + # else: + # tag_q = models.Q(hashtags__contains=hashtag.hashtag) + # if hashtag.aliases: + # for alias in hashtag.aliases: + # tag_q |= models.Q(hashtags__contains=alias) + # return self.filter(tag_q) + + +class PostManager(models.Manager): + def get_queryset(self): + return PostQuerySet(self.model, using=self._db) + + def not_hidden(self): + return self.get_queryset().not_hidden() + + def public(self, include_replies: bool = False): + return self.get_queryset().public(include_replies=include_replies) + + def local_public(self, include_replies: bool = False): + return self.get_queryset().local_public(include_replies=include_replies) + + def unlisted(self, include_replies: bool = False): + return self.get_queryset().unlisted(include_replies=include_replies) + + # def tagged_with(self, hashtag: str | Hashtag): + # return self.get_queryset().tagged_with(hashtag=hashtag) + + +class Post(models.Model): + """ + A post (status, toot) that is either local or remote. 
+ """ + + interactions: "models.QuerySet[PostInteraction]" + + class Visibilities(models.IntegerChoices): + public = 0 + local_only = 4 + unlisted = 1 + followers = 2 + mentioned = 3 + + class Types(models.TextChoices): + article = "Article" + audio = "Audio" + event = "Event" + image = "Image" + note = "Note" + page = "Page" + question = "Question" + video = "Video" + + id = models.BigIntegerField(primary_key=True, default=Snowflake.generate_post) + + # The author (attributedTo) of the post + author = models.ForeignKey( + "takahe.Identity", + on_delete=models.CASCADE, + related_name="posts", + ) + + # The state the post is in + # state = StateField(PostStates) + state = models.CharField(max_length=100, default="new") + state_changed = models.DateTimeField(auto_now_add=True) + + # If it is our post or not + local = models.BooleanField() + + # The canonical object ID + object_uri = models.CharField(max_length=2048, blank=True, null=True, unique=True) + + # Who should be able to see this Post + visibility = models.IntegerField( + choices=Visibilities.choices, + default=Visibilities.public, + ) + + # The main (HTML) content + content = models.TextField() + + type = models.CharField( + max_length=20, + choices=Types.choices, + default=Types.note, + ) + type_data = models.JSONField( + blank=True, + null=True, # , encoder=PostTypeDataEncoder, decoder=PostTypeDataDecoder + ) + + # If the contents of the post are sensitive, and the summary (content + # warning) to show if it is + sensitive = models.BooleanField(default=False) + summary = models.TextField(blank=True, null=True) + + # The public, web URL of this Post on the original server + url = models.CharField(max_length=2048, blank=True, null=True) + + # The Post it is replying to as an AP ID URI + # (as otherwise we'd have to pull entire threads to use IDs) + in_reply_to = models.CharField(max_length=500, blank=True, null=True, db_index=True) + + # The identities the post is directly to (who can see it if not public) + to = models.ManyToManyField( + "takahe.Identity", + related_name="posts_to", + blank=True, + ) + + # The identities mentioned in the post + mentions = models.ManyToManyField( + "takahe.Identity", + related_name="posts_mentioning", + blank=True, + ) + + # Hashtags in the post + hashtags = models.JSONField(blank=True, null=True) + + emojis = models.ManyToManyField( + "takahe.Emoji", + related_name="posts_using_emoji", + blank=True, + ) + + # Like/Boost/etc counts + stats = models.JSONField(blank=True, null=True) + + # When the post was originally created (as opposed to when we received it) + published = models.DateTimeField(default=timezone.now) + + # If the post has been edited after initial publication + edited = models.DateTimeField(blank=True, null=True) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + objects: PostManager = PostManager() + + class Meta: + # managed = False + db_table = "activities_post" + + class urls(urlman.Urls): + view = "{self.author.urls.view}posts/{self.id}/" + object_uri = "{self.author.actor_uri}posts/{self.id}/" + action_like = "{view}like/" + action_unlike = "{view}unlike/" + action_boost = "{view}boost/" + action_unboost = "{view}unboost/" + action_bookmark = "{view}bookmark/" + action_unbookmark = "{view}unbookmark/" + action_delete = "{view}delete/" + action_edit = "{view}edit/" + action_report = "{view}report/" + action_reply = "/compose/?reply_to={self.id}" + admin_edit = "/djadmin/activities/post/{self.id}/change/" + + def get_scheme(self, 
url): + return "https" + + def get_hostname(self, url): + return self.instance.author.domain.uri_domain + + def __str__(self): + return f"{self.author} #{self.id}" + + def get_absolute_url(self): + return self.urls.view + + def absolute_object_uri(self): + """ + Returns an object URI that is always absolute, for sending out to + other servers. + """ + if self.local: + return self.author.absolute_profile_uri() + f"posts/{self.id}/" + else: + return self.object_uri + + def in_reply_to_post(self) -> Optional["Post"]: + """ + Returns the actual Post object we're replying to, if we can find it + """ + if self.in_reply_to is None: + return None + return ( + Post.objects.filter(object_uri=self.in_reply_to) + .select_related("author") + .first() + ) + + @classmethod + def create_local( + cls, + author: Identity, + raw_prepend_content: str, + content: str, + summary: str | None = None, + sensitive: bool = False, + visibility: int = Visibilities.public, + reply_to: Optional["Post"] = None, + attachments: list | None = None, + type_data: dict | None = None, + published: datetime.datetime | None = None, + ) -> "Post": + with transaction.atomic(): + # Find mentions in this post + mentions = cls.mentions_from_content(content, author) + if reply_to: + mentions.add(reply_to.author) + # Maintain local-only for replies + if reply_to.visibility == reply_to.Visibilities.local_only: + visibility = reply_to.Visibilities.local_only + # Find emoji in this post + emojis = Emoji.emojis_from_content(content, None) + # Strip all unwanted HTML and apply linebreaks filter, grabbing hashtags on the way + parser = FediverseHtmlParser(linebreaks_filter(content), find_hashtags=True) + content = parser.html.replace("
<p>", "<p>
" + raw_prepend_content, 1) + hashtags = ( + sorted([tag[: Hashtag.MAXIMUM_LENGTH] for tag in parser.hashtags]) + or None + ) + # Make the Post object + post = cls.objects.create( + author=author, + content=content, + summary=summary or None, + sensitive=bool(summary) or sensitive, + local=True, + visibility=visibility, + hashtags=hashtags, + in_reply_to=reply_to.object_uri if reply_to else None, + ) + post.object_uri = post.urls.object_uri + post.url = post.absolute_object_uri() + post.mentions.set(mentions) + post.emojis.set(emojis) + if published and published < timezone.now(): + post.published = published + if timezone.now() - published > datetime.timedelta(days=2): + post.state = "fanned_out" # add post quietly if it's old + # if attachments:# FIXME + # post.attachments.set(attachments) + # if question: # FIXME + # post.type = question["type"] + # post.type_data = PostTypeData(__root__=question).__root__ + if type_data: + post.type_data = type_data + post.save() + # Recalculate parent stats for replies + if reply_to: + reply_to.calculate_stats() + return post + + def edit_local( + self, + raw_prepend_content: str, + content: str, + summary: str | None = None, + sensitive: bool | None = None, + visibility: int = Visibilities.public, + attachments: list | None = None, + attachment_attributes: list | None = None, + type_data: dict | None = None, + ): + with transaction.atomic(): + # Strip all HTML and apply linebreaks filter + parser = FediverseHtmlParser(linebreaks_filter(content), find_hashtags=True) + self.content = parser.html.replace("
<p>", "<p>
" + raw_prepend_content, 1) + self.hashtags = ( + sorted([tag[: Hashtag.MAXIMUM_LENGTH] for tag in parser.hashtags]) + or None + ) + self.summary = summary or None + self.sensitive = bool(summary) if sensitive is None else sensitive + self.visibility = visibility + self.edited = timezone.now() + self.mentions.set(self.mentions_from_content(content, self.author)) + self.emojis.set(Emoji.emojis_from_content(content, None)) + # self.attachments.set(attachments or []) # fixme + if type_data: + self.type_data = type_data + self.save() + + for attrs in attachment_attributes or []: + attachment = next( + (a for a in attachments or [] if str(a.id) == attrs.id), None + ) + if attachment is None: + continue + attachment.name = attrs.description + attachment.save() + + self.state = "edited" + self.state_changed = timezone.now() + self.state_next_attempt = None + self.state_locked_until = None + self.save() + + @classmethod + def mentions_from_content(cls, content, author) -> set[Identity]: + mention_hits = FediverseHtmlParser(content, find_mentions=True).mentions + mentions = set() + for handle in mention_hits: + handle = handle.lower() + if "@" in handle: + username, domain = handle.split("@", 1) + else: + username = handle + domain = author.domain_id + identity = Identity.by_username_and_domain( + username=username, + domain=domain, + fetch=True, + ) + if identity is not None: + mentions.add(identity) + return mentions + + def calculate_stats(self, save=True): + """ + Recalculates our stats dict + """ + from .models import PostInteraction + + self.stats = { + "likes": self.interactions.filter( + type=PostInteraction.Types.like, + state__in=["new", "fanned_out"], + ).count(), + "boosts": self.interactions.filter( + type=PostInteraction.Types.boost, + state__in=["new", "fanned_out"], + ).count(), + "replies": Post.objects.filter(in_reply_to=self.object_uri).count(), + } + if save: + self.save() + + @property + def safe_content_local(self): + return ContentRenderer(local=True).render_post(self.content, self) + + +class EmojiQuerySet(models.QuerySet): + def usable(self, domain: Domain | None = None): + """ + Returns all usable emoji, optionally filtering by domain too. + """ + visible_q = models.Q(local=True) | models.Q(public=True) + if True: # Config.system.emoji_unreviewed_are_public: + visible_q |= models.Q(public__isnull=True) + qs = self.filter(visible_q) + + if domain: + if not domain.local: + qs = qs.filter(domain=domain) + + return qs + + +class EmojiManager(models.Manager): + def get_queryset(self): + return EmojiQuerySet(self.model, using=self._db) + + def usable(self, domain: Domain | None = None): + return self.get_queryset().usable(domain) + + +class Emoji(models.Model): + class Meta: + # managed = False + db_table = "activities_emoji" + + # Normalized Emoji without the ':' + shortcode = models.SlugField(max_length=100, db_index=True) + + domain = models.ForeignKey( + "takahe.Domain", null=True, blank=True, on_delete=models.CASCADE + ) + local = models.BooleanField(default=True) + + # Should this be shown in the public UI? 
+ public = models.BooleanField(null=True) + + object_uri = models.CharField(max_length=500, blank=True, null=True, unique=True) + + mimetype = models.CharField(max_length=200) + + # Files may not be populated if it's remote and not cached on our side yet + file = models.ImageField( + # upload_to=partial(upload_emoji_namer, "emoji"), + null=True, + blank=True, + ) + + # A link to the custom emoji + remote_url = models.CharField(max_length=500, blank=True, null=True) + + # Used for sorting custom emoji in the picker + category = models.CharField(max_length=100, blank=True, null=True) + + # State of this Emoji + # state = StateField(EmojiStates) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + + objects = EmojiManager() + + @classmethod + def emojis_from_content(cls, content: str, domain: Domain | None) -> list["Emoji"]: + """ + Return a parsed and sanitized of emoji found in content without + the surrounding ':'. + """ + emoji_hits = FediverseHtmlParser( + content, find_emojis=True, emoji_domain=domain + ).emojis + emojis = sorted({emoji for emoji in emoji_hits}) + q = models.Q(local=True) | models.Q(public=True) | models.Q(public__isnull=True) + if domain and not domain.local: + q = q & models.Q(domain=domain) + return list( + cls.objects.filter(local=(domain is None) or domain.local) + .filter(q) + .filter(shortcode__in=emojis) + ) + + @classmethod + @cached(cache=TTLCache(maxsize=1000, ttl=60)) + def get_by_domain(cls, shortcode, domain: Domain | None) -> "Emoji | None": + """ + Given an emoji shortcode and optional domain, looks up the single + emoji and returns it. Raises Emoji.DoesNotExist if there isn't one. + """ + try: + if domain is None or domain.local: + return cls.objects.get(local=True, shortcode=shortcode) + else: + return cls.objects.get(domain=domain, shortcode=shortcode) + except Emoji.DoesNotExist: + return None + + @property + def fullcode(self): + return f":{self.shortcode}:" + + @property + def is_usable(self) -> bool: + """ + Return True if this Emoji is usable. 
+ """ + return self.public or self.public is None + + def full_url(self, always_show=False) -> RelativeAbsoluteUrl: + if self.is_usable or always_show: + if self.file: + return AutoAbsoluteUrl(settings.TAKAHE_MEDIA_URL + self.file.name) + # return AutoAbsoluteUrl(self.file.url) + elif self.remote_url: + return ProxyAbsoluteUrl( + f"/proxy/emoji/{self.pk}/", + remote_url=self.remote_url, + ) + return StaticAbsoluteUrl("img/blank-emoji-128.png") + + def as_html(self): + if self.is_usable: + return mark_safe( + f'Emoji {self.shortcode}' + ) + return self.fullcode + + +class HashtagQuerySet(models.QuerySet): + def public(self): + public_q = models.Q(public=True) + if True: # Config.system.hashtag_unreviewed_are_public: + public_q |= models.Q(public__isnull=True) + return self.filter(public_q) + + def hashtag_or_alias(self, hashtag: str): + return self.filter( + models.Q(hashtag=hashtag) | models.Q(aliases__contains=hashtag) + ) + + +class HashtagManager(models.Manager): + def get_queryset(self): + return HashtagQuerySet(self.model, using=self._db) + + def public(self): + return self.get_queryset().public() + + def hashtag_or_alias(self, hashtag: str): + return self.get_queryset().hashtag_or_alias(hashtag) + + +class Hashtag(models.Model): + class Meta: + # managed = False + db_table = "activities_hashtag" + + MAXIMUM_LENGTH = 100 + + # Normalized hashtag without the '#' + hashtag = models.SlugField(primary_key=True, max_length=100) + + # Friendly display override + name_override = models.CharField(max_length=100, null=True, blank=True) + + # Should this be shown in the public UI? + public = models.BooleanField(null=True) + + # State of this Hashtag + # state = StateField(HashtagStates) + state = models.CharField(max_length=100, default="outdated") + state_changed = models.DateTimeField(auto_now_add=True) + + # Metrics for this Hashtag + stats = models.JSONField(null=True, blank=True) + # Timestamp of last time the stats were updated + stats_updated = models.DateTimeField(null=True, blank=True) + + # List of other hashtags that are considered similar + aliases = models.JSONField(null=True, blank=True) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + + objects = HashtagManager() + + class urls(urlman.Urls): + view = "/tags/{self.hashtag}/" + follow = "/tags/{self.hashtag}/follow/" + unfollow = "/tags/{self.hashtag}/unfollow/" + admin = "/admin/hashtags/" + admin_edit = "{admin}{self.hashtag}/" + admin_enable = "{admin_edit}enable/" + admin_disable = "{admin_edit}disable/" + timeline = "/tags/{self.hashtag}/" + + hashtag_regex = re.compile(r"\B#([a-zA-Z0-9(_)]+\b)(?!;)") + + def save(self, *args, **kwargs): + self.hashtag = self.hashtag.lstrip("#") + if self.name_override: + self.name_override = self.name_override.lstrip("#") + return super().save(*args, **kwargs) + + @property + def display_name(self): + return self.name_override or self.hashtag + + def __str__(self): + return self.display_name + + def usage_months(self, num: int = 12) -> dict[date, int]: + """ + Return the most recent num months of stats + """ + if not self.stats: + return {} + results = {} + for key, val in self.stats.items(): + parts = key.split("-") + if len(parts) == 2: + year = int(parts[0]) + month = int(parts[1]) + results[date(year, month, 1)] = val + return dict(sorted(results.items(), reverse=True)[:num]) + + def usage_days(self, num: int = 7) -> dict[date, int]: + """ + Return the most recent num days of stats + """ + if not self.stats: + return {} + results = {} 
+ for key, val in self.stats.items(): + parts = key.split("-") + if len(parts) == 3: + year = int(parts[0]) + month = int(parts[1]) + day = int(parts[2]) + results[date(year, month, day)] = val + return dict(sorted(results.items(), reverse=True)[:num]) + + def to_mastodon_json(self, following: bool | None = None): + value = { + "name": self.hashtag, + "url": self.urls.view.full(), # type: ignore + "history": [], + } + + if following is not None: + value["following"] = following + + return value + + +class PostInteraction(models.Model): + """ + Handles both boosts and likes + """ + + class Types(models.TextChoices): + like = "like" + boost = "boost" + vote = "vote" + pin = "pin" + + id = models.BigIntegerField( + primary_key=True, + default=Snowflake.generate_post_interaction, + ) + + # The state the boost is in + # state = StateField(PostInteractionStates) + state = models.CharField(max_length=100, default="new") + state_changed = models.DateTimeField(auto_now_add=True) + + # The canonical object ID + object_uri = models.CharField(max_length=500, blank=True, null=True, unique=True) + + # What type of interaction it is + type = models.CharField(max_length=100, choices=Types.choices) + + # The user who boosted/liked/etc. + identity = models.ForeignKey( + "takahe.Identity", + on_delete=models.CASCADE, + related_name="interactions", + ) + + # The post that was boosted/liked/etc + post = models.ForeignKey( + "takahe.Post", + on_delete=models.CASCADE, + related_name="interactions", + ) + + # Used to store any interaction extra text value like the vote + # in the question/poll case + value = models.CharField(max_length=50, blank=True, null=True) + + # When the activity was originally created (as opposed to when we received it) + # Mastodon only seems to send this for boosts, not likes + published = models.DateTimeField(default=timezone.now) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + + class Meta: + # managed = False + db_table = "activities_postinteraction" + + +class Block(models.Model): + """ + When one user (the source) mutes or blocks another (the target) + """ + + # state = StateField(BlockStates) + state = models.CharField(max_length=100, default="new") + state_changed = models.DateTimeField(auto_now_add=True) + + source = models.ForeignKey( + "takahe.Identity", + on_delete=models.CASCADE, + related_name="outbound_blocks", + ) + + target = models.ForeignKey( + "takahe.Identity", + on_delete=models.CASCADE, + related_name="inbound_blocks", + ) + + uri = models.CharField(blank=True, null=True, max_length=500) + + # If it is a mute, we will stop delivering any activities from target to + # source, but we will still deliver activities from source to target. + # A full block (mute=False) stops activities both ways. 
+ mute = models.BooleanField() + include_notifications = models.BooleanField(default=False) + + expires = models.DateTimeField(blank=True, null=True) + note = models.TextField(blank=True, null=True) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + + class Meta: + # managed = False + db_table = "users_block" + + def __str__(self): + return f"#{self.pk}: {self.source} blocks {self.target}" + + ### Alternate fetchers/constructors ### + + @classmethod + def maybe_get( + cls, source, target, mute=False, require_active=False + ) -> Optional["Block"]: + """ + Returns a Block if it exists between source and target + """ + try: + if require_active: + return cls.objects.filter( + status__in=["new", "sent", "awaiting_expiry"] + ).get(source=source, target=target, mute=mute) + else: + return cls.objects.get(source=source, target=target, mute=mute) + except cls.DoesNotExist: + return None + + @classmethod + def create_local_block(cls, source, target) -> "Block": + """ + Creates or updates a full Block from a local Identity to the target + (which can be local or remote). + """ + if not source.local: + raise ValueError("You cannot block from a remote Identity") + block = cls.maybe_get(source=source, target=target, mute=False) + if block is not None: + if not block.state in ["new", "sent", "awaiting_expiry"]: + block.state = BlockStates.new # type:ignore + block.save() + else: + with transaction.atomic(): + block = cls.objects.create( + source=source, + target=target, + mute=False, + ) + block.uri = source.actor_uri + f"block/{block.pk}/" + block.save() + return block + + @classmethod + def create_local_mute( + cls, + source, + target, + duration=None, + include_notifications=False, + ) -> "Block": + """ + Creates or updates a muting Block from a local Identity to the target + (which can be local or remote). + """ + if not source.local: + raise ValueError("You cannot mute from a remote Identity") + block = cls.maybe_get(source=source, target=target, mute=True) + if block is not None: + if not block in ["new", "sent", "awaiting_expiry"]: + block.state = BlockStates.new # type:ignore + if duration: + block.expires = timezone.now() + datetime.timedelta(seconds=duration) + block.include_notifications = include_notifications + block.save() + else: + with transaction.atomic(): + block = cls.objects.create( + source=source, + target=target, + mute=True, + include_notifications=include_notifications, + expires=( + timezone.now() + datetime.timedelta(seconds=duration) + if duration + else None + ), + ) + block.uri = source.actor_uri + f"block/{block.pk}/" + block.save() + return block + + +class InboxMessage(models.Model): + """ + an incoming inbox message that needs processing. + + Yes, this is kind of its own message queue built on the state graph system. + It's fine. It'll scale up to a decent point. + """ + + message = models.JSONField() + + # state = StateField(InboxMessageStates) + state = models.CharField(max_length=100, default="received") + state_changed = models.DateTimeField(auto_now_add=True) + + class Meta: + # managed = False + db_table = "users_inboxmessage" + + @classmethod + def create_internal(cls, payload): + """ + Creates an internal action message + """ + cls.objects.create( + message={ + "type": "__internal__", + "object": payload, + } + ) + + +class Config(models.Model): + """ + A configuration setting for either the server or a specific user or identity. + + The possible options and their defaults are defined at the bottom of the file. 
+ """ + + key = models.CharField(max_length=500) + + user = models.ForeignKey( + User, + blank=True, + null=True, + related_name="configs", + on_delete=models.CASCADE, + ) + + identity = models.ForeignKey( + Identity, + blank=True, + null=True, + related_name="configs", + on_delete=models.CASCADE, + ) + + domain = models.ForeignKey( + Domain, + blank=True, + null=True, + related_name="configs", + on_delete=models.CASCADE, + ) + + json = models.JSONField(blank=True, null=True) + image = models.ImageField( + blank=True, + null=True, + ) + + class Meta: + # managed = False + db_table = "core_config" + unique_together = [ + ("key", "user", "identity", "domain"), + ] diff --git a/takahe/tests.py b/takahe/tests.py new file mode 100644 index 00000000..7ce503c2 --- /dev/null +++ b/takahe/tests.py @@ -0,0 +1,3 @@ +from django.test import TestCase + +# Create your tests here. diff --git a/takahe/uris.py b/takahe/uris.py new file mode 100644 index 00000000..e686108b --- /dev/null +++ b/takahe/uris.py @@ -0,0 +1,89 @@ +import hashlib +import sys +from urllib.parse import urljoin + +from django.conf import settings +from django.contrib.staticfiles.storage import staticfiles_storage + + +class RelativeAbsoluteUrl: + """ + Represents a URL that can have both "relative" and "absolute" forms + for various use either locally or remotely. + """ + + absolute: str + relative: str + + def __init__(self, absolute: str, relative: str | None = None): + if "://" not in absolute: + raise ValueError(f"Absolute URL {absolute!r} is not absolute!") + self.absolute = absolute + self.relative = relative or absolute + + +class AutoAbsoluteUrl(RelativeAbsoluteUrl): + """ + Automatically makes the absolute variant by using either settings.MAIN_DOMAIN + or a passed identity's URI domain. + """ + + def __init__( + self, + relative: str, + identity=None, + ): + self.relative = relative + if identity: + absolute_prefix = f"https://{identity.domain.uri_domain}/" + else: + absolute_prefix = f"https://{settings.MAIN_DOMAIN}/" + self.absolute = urljoin(absolute_prefix, self.relative) + + +class ProxyAbsoluteUrl(AutoAbsoluteUrl): + """ + AutoAbsoluteUrl variant for proxy paths, that also attaches a remote URI hash + plus extension to the end if it can. + """ + + def __init__( + self, + relative: str, + identity=None, + remote_url: str | None = None, + ): + if remote_url: + # See if there is a file extension we can grab + extension = "bin" + remote_filename = remote_url.split("/")[-1] + if "." in remote_filename: + extension = remote_filename.split(".")[-1] + # When provided, attach a hash of the remote URL + # SHA1 chosen as it generally has the best performance in modern python, and security is not a concern + # Hash truncation is generally fine, as in the typical use case the hash is scoped to the identity PK. + relative += f"{hashlib.sha1(remote_url.encode('ascii')).hexdigest()[:10]}.{extension}" + super().__init__(relative, identity) + + +class StaticAbsoluteUrl(RelativeAbsoluteUrl): + """ + Creates static URLs given only the static-relative path + """ + + def __init__(self, path: str): + try: + static_url = staticfiles_storage.url(path) + except ValueError: + # Suppress static issues during the first collectstatic + # Yes, I know it's a big hack! 
+            if "collectstatic" in sys.argv:
+                super().__init__("https://example.com/")
+                return
+            raise
+        if "://" in static_url:
+            super().__init__(static_url)
+        else:
+            super().__init__(
+                urljoin(f"https://{settings.MAIN_DOMAIN}/", static_url), static_url
+            )
diff --git a/takahe/utils.py b/takahe/utils.py
new file mode 100644
index 00000000..3c3975ca
--- /dev/null
+++ b/takahe/utils.py
@@ -0,0 +1,605 @@
+from typing import TYPE_CHECKING
+
+from django.conf import settings
+from django.core.cache import cache
+
+from .models import *
+
+if TYPE_CHECKING:
+    from users.models import APIdentity
+    from users.models import User as NeoUser
+
+
+def _int(s: str):
+    try:
+        return int(s)
+    except (TypeError, ValueError):
+        return -1
+
+
+def _rating_to_emoji(score: int, star_mode=0):
+    """convert score(0~10) to mastodon star emoji code, e.g. 7 -> 3 solid + 1 half + 1 empty"""
+    if score is None or score == "" or score == 0:
+        return ""
+    solid_stars = score // 2
+    half_star = int(bool(score % 2))
+    empty_stars = 5 - solid_stars if not half_star else 5 - solid_stars - 1
+    if star_mode == 1:
+        emoji_code = "🌕" * solid_stars + "🌗" * half_star + "🌑" * empty_stars
+    else:
+        emoji_code = (
+            settings.STAR_SOLID * solid_stars
+            + settings.STAR_HALF * half_star
+            + settings.STAR_EMPTY * empty_stars
+        )
+    emoji_code = emoji_code.replace("::", ": :")
+    emoji_code = " " + emoji_code + " "
+    return emoji_code
+
+
+class Takahe:
+    Visibilities = Post.Visibilities
+
+    @staticmethod
+    def get_domain():
+        domain = settings.SITE_INFO["site_domain"]
+        d = Domain.objects.filter(domain=domain).first()
+        if not d:
+            logger.info(f"Creating takahe domain {domain}")
+            d = Domain.objects.create(
+                domain=domain,
+                local=True,
+                service_domain=None,
+                notes="NeoDB",
+                nodeinfo=None,
+            )
+        return d
+
+    @staticmethod
+    def get_node_name_for_domain(d: str):
+        domain = Domain.objects.filter(domain=d).first()
+        if domain and domain.nodeinfo:
+            return domain.nodeinfo.get("metadata", {}).get("nodeName")
+
+    @staticmethod
+    def init_identity_for_local_user(u: "NeoUser"):
+        """
+        When a new local NeoDB user is created,
+        create a takahe user with the NeoDB user pk,
+        create a takahe identity,
+        then create a NeoDB APIdentity with the takahe identity pk.
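+
+        The operation is idempotent: existing takahe User/Identity rows are
+        reused and only updated when they have drifted out of sync, so it is
+        safe to call again for an already-initialized user.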
+ """ + from users.models import APIdentity + + logger.info(f"User {u} initialize identity") + if not u.username: + logger.warning(f"User {u} has no username") + return None + user = User.objects.filter(pk=u.pk).first() + handler = "@" + u.username + if not user: + logger.info(f"Creating takahe user {u}") + user = User.objects.create(pk=u.pk, email=handler) + else: + if user.email != handler: + logger.warning(f"Updating takahe user {u} email to {handler}") + user.email = handler + user.save() + domain = Domain.objects.get(domain=settings.SITE_INFO["site_domain"]) + identity = Identity.objects.filter(username=u.username, local=True).first() + if not identity: + logger.info(f"Creating takahe identity {u}@{domain}") + identity = Identity.objects.create( + actor_uri=f"https://{domain.uri_domain}/@{u.username}@{domain.domain}/", + profile_uri=u.url, + username=u.username, + domain=domain, + name=u.username, + local=True, + discoverable=not u.preference.no_anonymous_view, + ) + identity.generate_keypair() + if not user.identities.filter(pk=identity.pk).exists(): + user.identities.add(identity) + apidentity = APIdentity.objects.filter(pk=identity.pk).first() + if not apidentity: + logger.info(f"Creating APIdentity for {identity}") + apidentity = APIdentity.objects.create( + user=u, + id=identity.pk, + local=True, + username=u.username, + domain_name=domain.domain, + deleted=identity.deleted, + ) + elif apidentity.username != identity.username: + logger.warning( + f"Updating APIdentity {apidentity} username to {identity.username}" + ) + apidentity.username = identity.username + apidentity.save() + if u.identity != apidentity: + logger.warning(f"Linking user {u} identity to {apidentity}") + u.identity = apidentity + u.save(update_fields=["identity"]) + return apidentity + + @staticmethod + def get_identity_by_handler(username: str, domain: str) -> Identity | None: + return Identity.objects.filter( + username__iexact=username, domain__domain__iexact=domain + ).first() + + @staticmethod + def create_internal_message(message: dict): + InboxMessage.create_internal(message) + + @staticmethod + def fetch_remote_identity(handler: str) -> int | None: + InboxMessage.create_internal({"type": "FetchIdentity", "handle": handler}) + + @staticmethod + def get_identity(pk: int): + return Identity.objects.get(pk=pk) + + @staticmethod + def get_identity_by_local_user(u: "NeoUser"): + return ( + Identity.objects.filter(pk=u.identity.pk, local=True).first() + if u and u.is_authenticated and u.identity + else None + ) + + @staticmethod + def get_or_create_remote_apidentity(identity: Identity): + from users.models import APIdentity + + apid = APIdentity.objects.filter(pk=identity.pk).first() + if not apid: + if identity.local: + raise ValueError(f"local takahe identity {identity} missing APIdentity") + if not identity.domain_id: + raise ValueError(f"remote takahe identity {identity} missing domain") + apid = APIdentity.objects.create( + id=identity.pk, + user=None, + local=False, + username=identity.username, + domain_name=identity.domain_id, + deleted=identity.deleted, + ) + return apid + + @staticmethod + def get_local_user_by_identity(identity: Identity): + from users.models import User as NeoUser + + return NeoUser.objects.get(identity_id=identity.pk) if identity.local else None + + @staticmethod + def get_following_ids(identity_pk: int): + targets = Follow.objects.filter( + source_id=identity_pk, state="accepted" + ).values_list("target", flat=True) + return list(targets) + + @staticmethod + def 
get_follower_ids(identity_pk: int): + targets = Follow.objects.filter( + target_id=identity_pk, state="accepted" + ).values_list("source", flat=True) + return list(targets) + + @staticmethod + def get_following_request_ids(identity_pk: int): + targets = Follow.objects.filter( + source_id=identity_pk, state="pending_approval" + ).values_list("target", flat=True) + return list(targets) + + @staticmethod + def get_requested_follower_ids(identity_pk: int): + targets = Follow.objects.filter( + target_id=identity_pk, state="pending_approval" + ).values_list("source", flat=True) + return list(targets) + + @staticmethod + def update_follow_state( + source_pk: int, target_pk: int, from_states: list[str], to_state: str + ): + follow = Follow.objects.filter(source_id=source_pk, target_id=target_pk).first() + if ( + follow + and (not from_states or follow.state in from_states) + and follow.state != to_state + ): + follow.state = to_state + follow.save() + return follow + + @staticmethod + def follow(source_pk: int, target_pk: int): + try: + follow = Follow.objects.get(source_id=source_pk, target_id=target_pk) + if follow.state != "accepted": + follow.state = "unrequested" + follow.save() + except Follow.DoesNotExist: + source = Identity.objects.get(pk=source_pk) + follow = Follow.objects.create( + source_id=source_pk, + target_id=target_pk, + boosts=True, + uri="", + state="unrequested", + ) + follow.uri = source.actor_uri + f"follow/{follow.pk}/" + follow.save() + + @staticmethod + def unfollow(source_pk: int, target_pk: int): + Takahe.update_follow_state(source_pk, target_pk, [], "undone") + # InboxMessage.create_internal( + # { + # "type": "ClearTimeline", + # "object": target_identity.pk, + # "actor": self.identity.pk, + # } + # ) + + @staticmethod + def accept_follow_request(source_pk: int, target_pk: int): + Takahe.update_follow_state(source_pk, target_pk, [], "accepting") + + @staticmethod + def reject_follow_request(source_pk: int, target_pk: int): + Takahe.update_follow_state(source_pk, target_pk, [], "rejecting") + + @staticmethod + def get_muting_ids(identity_pk: int) -> list[int]: + targets = Block.objects.filter( + source_id=identity_pk, + mute=True, + state__in=["new", "sent", "awaiting_expiry"], + ).values_list("target", flat=True) + return list(targets) + + @staticmethod + def get_blocking_ids(identity_pk: int) -> list[int]: + targets = Block.objects.filter( + source_id=identity_pk, + mute=False, + state__in=["new", "sent", "awaiting_expiry"], + ).values_list("target", flat=True) + return list(targets) + + @staticmethod + def get_rejecting_ids(identity_pk: int) -> list[int]: + pks1 = Block.objects.filter( + source_id=identity_pk, + mute=False, + state__in=["new", "sent", "awaiting_expiry"], + ).values_list("target", flat=True) + pks2 = Block.objects.filter( + target_id=identity_pk, + mute=False, + state__in=["new", "sent", "awaiting_expiry"], + ).values_list("source", flat=True) + return list(set(list(pks1) + list(pks2))) + + @staticmethod + def block_or_mute(source_pk: int, target_pk: int, is_mute: bool): + source = Identity.objects.get(pk=source_pk) + if not source.local: + raise ValueError(f"Cannot block/mute from remote identity {source}") + with transaction.atomic(): + block, _ = Block.objects.update_or_create( + defaults={"state": "new"}, + source_id=source_pk, + target_id=target_pk, + mute=is_mute, + ) + if block.state != "new" or not block.uri: + block.state = "new" + block.uri = source.actor_uri + f"block/{block.pk}/" + block.save() + if not is_mute: + 
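+                # a hard block (unlike a mute) also tears down follow state in
+                # both directions: source stops following target, and any
+                # pending follow request from target is rejected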
+                Takahe.unfollow(source_pk, target_pk)
+                Takahe.reject_follow_request(target_pk, source_pk)
+        return block
+
+    @staticmethod
+    def undo_block_or_mute(source_pk: int, target_pk: int, is_mute: bool):
+        Block.objects.filter(
+            source_id=source_pk, target_id=target_pk, mute=is_mute
+        ).update(state="undone")
+
+    @staticmethod
+    def block(source_pk: int, target_pk: int):
+        return Takahe.block_or_mute(source_pk, target_pk, False)
+
+    @staticmethod
+    def unblock(source_pk: int, target_pk: int):
+        return Takahe.undo_block_or_mute(source_pk, target_pk, False)
+
+    @staticmethod
+    def mute(source_pk: int, target_pk: int):
+        return Takahe.block_or_mute(source_pk, target_pk, True)
+
+    @staticmethod
+    def unmute(source_pk: int, target_pk: int):
+        return Takahe.undo_block_or_mute(source_pk, target_pk, True)
+
+    @staticmethod
+    def _force_state_cycle():  # for unit testing only
+        Follow.objects.filter(
+            state__in=["rejecting", "undone", "pending_removal"]
+        ).delete()
+        Follow.objects.all().update(state="accepted")
+        Block.objects.filter(state="new").update(state="sent")
+        Block.objects.exclude(state="sent").delete()
+
+    @staticmethod
+    def post(
+        author_pk: int,
+        pre_content: str,
+        content: str,
+        visibility: Visibilities,
+        data: dict | None = None,
+        post_pk: int | None = None,
+        post_time: datetime.datetime | None = None,
+        reply_to_pk: int | None = None,
+    ) -> Post | None:
+        identity = Identity.objects.get(pk=author_pk)
+        post = (
+            Post.objects.filter(author=identity, pk=post_pk).first()
+            if post_pk
+            else None
+        )
+        if post_pk and not post:
+            raise ValueError(f"Cannot find post to edit: {post_pk}")
+        reply_to_post = (
+            Post.objects.filter(pk=reply_to_pk).first() if reply_to_pk else None
+        )
+        if reply_to_pk and not reply_to_post:
+            raise ValueError(f"Cannot find post to reply to: {reply_to_pk}")
+        if post:
+            post.edit_local(
+                pre_content, content, visibility=visibility, type_data=data
+            )
+        else:
+            post = Post.create_local(
+                identity,
+                pre_content,
+                content,
+                visibility=visibility,
+                type_data=data,
+                published=post_time,
+                reply_to=reply_to_post,
+            )
+        return post
+
+    @staticmethod
+    def get_post(post_pk: int) -> Post | None:
+        return Post.objects.filter(pk=post_pk).first()
+
+    @staticmethod
+    def get_posts(post_pks: list[int]):
+        return Post.objects.filter(pk__in=post_pks)
+
+    @staticmethod
+    def get_post_url(post_pk: int) -> str | None:
+        post = Post.objects.filter(pk=post_pk).first() if post_pk else None
+        return post.object_uri if post else None
+
+    @staticmethod
+    def delete_posts(post_pks):
+        Post.objects.filter(pk__in=post_pks).update(state="deleted")
+
+    @staticmethod
+    def post_mark(mark, share_as_new_post: bool) -> Post | None:
+        from catalog.common import ItemCategory
+
+        user = mark.owner.user
+        tags = (
+            "\n"
+            + user.preference.mastodon_append_tag.replace(
+                "[category]", str(ItemCategory(mark.item.category).label)
+            )
+            if user.preference.mastodon_append_tag
+            else ""
+        )
+        stars = _rating_to_emoji(mark.rating_grade, 1)
+        item_link = f"{settings.SITE_INFO['site_url']}/~neodb~{mark.item.url}"
+
+        pre_content = (
+            f'{mark.action_label}<a href="{item_link}">《{mark.item.display_title}》</a>'
+        )
+        content = f"{stars}\n{mark.comment_text or ''}{tags}"
+        data = {
+            "object": {
+                "relatedWith": [mark.item.ap_object_ref, mark.shelfmember.ap_object]
+            }
+        }
+        if mark.comment:
+            data["object"]["relatedWith"].append(mark.comment.ap_object)
+        if mark.rating:
+            data["object"]["relatedWith"].append(mark.rating.ap_object)
+        if mark.visibility == 1:
+            v = Takahe.Visibilities.followers
+        elif mark.visibility == 2:
+            v = Takahe.Visibilities.mentioned
+        elif user.preference.mastodon_publish_public:
+            v = Takahe.Visibilities.public
+        else:
+            v = Takahe.Visibilities.unlisted
+        existing_post = None if share_as_new_post else mark.shelfmember.latest_post
+        post = Takahe.post(
+            mark.owner.pk,
+            pre_content,
+            content,
+            v,
+            data,
+            existing_post.pk if existing_post else None,
+            mark.shelfmember.created_time,
+        )
+        if not post:
+            return
+        for piece in [mark.shelfmember, mark.comment, mark.rating]:
+            if piece:
+                piece.link_post(post)
+        return post
+
+    @staticmethod
+    def interact_post(post_pk: int, identity_pk: int, type: str):
+        post = Post.objects.filter(pk=post_pk).first()
+        if not post:
+            logger.warning(f"Cannot find post {post_pk}")
+            return
+        interaction = PostInteraction.objects.get_or_create(
+            type=type,
+            identity_id=identity_pk,
+            post=post,
+        )[0]
+        if interaction.state not in ["new", "fanned_out"]:
+            interaction.state = "new"
+            interaction.save()
+        post.calculate_stats()
+        return interaction

+    @staticmethod
+    def uninteract_post(post_pk: int, identity_pk: int, type: str):
+        post = Post.objects.filter(pk=post_pk).first()
+        if not post:
+            logger.warning(f"Cannot find post {post_pk}")
+            return
+        for interaction in PostInteraction.objects.filter(
+            type=type,
+            identity_id=identity_pk,
+            post=post,
+        ):
+            interaction.state = "undone"
+            interaction.save()
+        post.calculate_stats()
+
+    @staticmethod
+    def reply_post(
+        post_pk: int, identity_pk: int, content: str, visibility: Visibilities
+    ):
+        return Takahe.post(identity_pk, "", content, visibility, reply_to_pk=post_pk)
+
+    @staticmethod
+    def like_post(post_pk: int, identity_pk: int):
+        return Takahe.interact_post(post_pk, identity_pk, "like")
+
+    @staticmethod
+    def unlike_post(post_pk: int, identity_pk: int):
+        return Takahe.uninteract_post(post_pk, identity_pk, "like")
+
+    @staticmethod
+    def post_liked_by(post_pk: int, identity_pk: int) -> bool:
+        interaction = Takahe.get_user_interaction(post_pk, identity_pk, "like")
+        return interaction is not None and interaction.state in ["new", "fanned_out"]
+
+    @staticmethod
+    def get_user_interaction(post_pk: int, identity_pk: int, type: str):
+        if not post_pk or not identity_pk:
+            return None
+        post = Post.objects.filter(pk=post_pk).first()
+        if not post:
+            logger.warning(f"Cannot find post {post_pk}")
+            return None
+        return PostInteraction.objects.filter(
+            type=type,
+            identity_id=identity_pk,
+            post=post,
+        ).first()
+
+    @staticmethod
+    def get_post_stats(post_pk: int) -> dict:
+        post = Post.objects.filter(pk=post_pk).first()
+        if not post:
+            logger.warning(f"Cannot find post {post_pk}")
+            return {}
+        return post.stats or {}
+
+    @staticmethod
+    def get_replies_for_posts(post_pks: list[int], identity_pk: int | None):
+        post_uris = Post.objects.filter(pk__in=post_pks).values_list(
+            "object_uri", flat=True
+        )
+        if not post_uris.exists():
+            return Post.objects.none()
+        identity = (
+            Identity.objects.filter(pk=identity_pk).first() if identity_pk else None
+        )
+        child_queryset = (
+            Post.objects.not_hidden()
+            .prefetch_related(
+                # "attachments",
+                "mentions",
+                "emojis",
+            )
+            .select_related(
+                "author",
+                "author__domain",
+            )
+            .filter(in_reply_to__in=post_uris)
+            .order_by("published")
+        )
+        if identity:
+            child_queryset = child_queryset.visible_to(
+                identity=identity, include_replies=True
+            )
+        else:
+            child_queryset = child_queryset.unlisted(include_replies=True)
+        return child_queryset
+
+    @staticmethod
+    def html2txt(html: str) ->
str: + if not html: + return "" + return FediverseHtmlParser(html).plain_text + + @staticmethod + def txt2html(txt: str) -> str: + if not txt: + return "" + return FediverseHtmlParser(linebreaks_filter(txt)).html + + @staticmethod + def update_state(obj, state): + obj.state = state + obj.state_changed = timezone.now() + obj.state_next_attempt = None + obj.state_locked_until = None + obj.save( + update_fields=[ + "state", + "state_changed", + "state_next_attempt", + "state_locked_until", + ] + ) + + @staticmethod + def get_neodb_peers(): + cache_key = "neodb_peers" + peers = cache.get(cache_key, None) + if peers is None: + peers = list( + Domain.objects.filter( + nodeinfo__protocols__contains="neodb", local=False + ).values_list("pk", flat=True) + ) + cache.set(cache_key, peers, timeout=1800) + return peers + + @staticmethod + def verify_invite(token): + if not token: + return False + invite = Invite.objects.filter(token=token).first() + return invite and invite.valid diff --git a/takahe/views.py b/takahe/views.py new file mode 100644 index 00000000..91ea44a2 --- /dev/null +++ b/takahe/views.py @@ -0,0 +1,3 @@ +from django.shortcuts import render + +# Create your views here. diff --git a/users/account.py b/users/account.py index 5cd2858d..4f8d3cea 100644 --- a/users/account.py +++ b/users/account.py @@ -20,10 +20,12 @@ from loguru import logger from common.config import * +from common.utils import AuthedHttpRequest from journal.models import remove_data_by_user from mastodon import mastodon_request_included from mastodon.api import * from mastodon.api import verify_account +from takahe.utils import Takahe from .models import Preference, User from .tasks import * @@ -48,7 +50,13 @@ def login(request): # store redirect url in the cookie if request.GET.get("next"): request.session["next_url"] = request.GET.get("next") - + invite_status = -1 if settings.INVITE_ONLY else 0 + if settings.INVITE_ONLY and request.GET.get("invite"): + if Takahe.verify_invite(request.GET.get("invite")): + invite_status = 1 + request.session["invite"] = request.GET.get("invite") + else: + invite_status = -2 return render( request, "users/login.html", @@ -57,6 +65,7 @@ def login(request): "scope": quote(settings.MASTODON_CLIENT_SCOPE), "selected_site": selected_site, "allow_any_site": settings.MASTODON_ALLOW_ANY_SITE, + "invite_status": invite_status, }, ) else: @@ -162,18 +171,12 @@ def OAuth2_login(request): ): # swap login for existing user return swap_login(request, token, site, refresh_token) - user = authenticate(request, token=token, site=site) + user: User = authenticate(request, token=token, site=site) # type: ignore if user: # existing user user.mastodon_token = token # type: ignore user.mastodon_refresh_token = refresh_token # type: ignore user.save(update_fields=["mastodon_token", "mastodon_refresh_token"]) - auth_login(request, user) - if request.session.get("next_url") is not None: - response = redirect(request.session.get("next_url")) - del request.session["next_url"] - else: - response = redirect(reverse("common:home")) - return response + return login_existing_user(request, user) else: # newly registered user code, user_data = verify_account(site, token) if code != 200 or user_data is None: @@ -193,12 +196,36 @@ def OAuth2_login(request): def register_new_user(request, **param): + if settings.INVITE_ONLY: + if not Takahe.verify_invite(request.session.get("invite")): + return render( + request, + "common/error.html", + { + "msg": _("注册失败😫"), + "secondary_msg": _("本站仅限邀请注册"), + }, + ) + else: + del 
request.session["invite"] new_user = User.register(**param) request.session["new_user"] = True auth_login(request, new_user) return redirect(reverse("users:register")) +def login_existing_user(request, existing_user): + auth_login(request, existing_user) + if not existing_user.username or not existing_user.identity: + response = redirect(reverse("account:register")) + elif request.session.get("next_url") is not None: + response = redirect(request.session.get("next_url")) + del request.session["next_url"] + else: + response = redirect(reverse("common:home")) + return response + + @mastodon_request_included @login_required def logout(request): @@ -317,8 +344,7 @@ def verify_email(request): elif action == "login": user = User.objects.get(pk=s["i"]) if user.email == email: - auth_login(request, user) - return redirect(reverse("common:home")) + return login_existing_user(request, user) else: error = _("电子邮件地址不匹配") elif action == "register": @@ -336,7 +362,7 @@ def verify_email(request): @login_required -def register(request): +def register(request: AuthedHttpRequest): form = None if settings.MASTODON_ALLOW_ANY_SITE: form = RegistrationForm(request.POST) @@ -352,7 +378,7 @@ def register(request): email_cleared = False if not form.is_valid(): return render(request, "users/register.html", {"form": form}) - if request.user.username is None and form.cleaned_data["username"]: + if not request.user.username and form.cleaned_data["username"]: if User.objects.filter( username__iexact=form.cleaned_data["username"] ).exists(): @@ -390,11 +416,13 @@ def register(request): if request.user.pending_email: django_rq.get_queue("mastodon").enqueue( send_verification_link, - request.user.id, + request.user.pk, "verify", request.user.pending_email, ) messages.add_message(request, messages.INFO, _("已发送验证邮件,请查收。")) + if request.user.username and not request.user.identity_linked(): + request.user.initialize() if username_changed: messages.add_message(request, messages.INFO, _("用户名已设置。")) if email_cleared: @@ -480,9 +508,9 @@ def auth_logout(request): def clear_data_task(user_id): user = User.objects.get(pk=user_id) user_str = str(user) - remove_data_by_user(user) + if user.identity: + remove_data_by_user(user.identity) user.clear() - user.save() logger.warning(f"User {user_str} data cleared.") diff --git a/users/data.py b/users/data.py index 27590a1d..eea7213c 100644 --- a/users/data.py +++ b/users/data.py @@ -68,18 +68,6 @@ def data(request): ) -@mastodon_request_included -@login_required -def account_info(request): - return render( - request, - "users/account.html", - { - "allow_any_site": settings.MASTODON_ALLOW_ANY_SITE, - }, - ) - - @login_required def data_import_status(request): return render( diff --git a/users/management/commands/import_tasks.py b/users/management/commands/import_tasks.py new file mode 100644 index 00000000..913a1e3f --- /dev/null +++ b/users/management/commands/import_tasks.py @@ -0,0 +1,27 @@ +from django.conf import settings +from django.core.management.base import BaseCommand +from loguru import logger +from tqdm import tqdm + +from users.models import Preference, User + + +class Command(BaseCommand): + help = "Manage import tasks" + + def add_arguments(self, parser): + parser.add_argument( + "--reset", + action="store_true", + ) + + def handle(self, *args, **options): + if options["reset"]: + count = 0 + for user in tqdm(User.objects.all()): + if user.preference.import_status.get("douban_pending"): + user.preference.import_status["douban_pending"] = False + 
user.preference.save(update_fields=["import_status"]) + count += 1 + self.stdout.write(self.style.SUCCESS(f"{count} users reset")) + diff --git a/users/management/commands/invite.py b/users/management/commands/invite.py new file mode 100644 index 00000000..a61e6b86 --- /dev/null +++ b/users/management/commands/invite.py @@ -0,0 +1,33 @@ +from datetime import timedelta + +from django.conf import settings +from django.core.management.base import BaseCommand +from django.urls import reverse +from django.utils import timezone +from loguru import logger + +from takahe.utils import Invite + + +class Command(BaseCommand): + help = "Manage invite" + + def add_arguments(self, parser): + parser.add_argument( + "--create", + action="store_true", + ) + # parser.add_argument( + # "--revoke", + # action="store_true", + # ) + + def handle(self, *args, **options): + if options["create"]: + inv = Invite.create_random() + self.stdout.write(self.style.SUCCESS(f"Invite created: {inv.token}")) + self.stdout.write( + self.style.SUCCESS( + f"Link: {settings.SITE_INFO['site_url']}{reverse('users:login')}?invite={inv.token}" + ) + ) diff --git a/users/management/commands/refresh_following.py b/users/management/commands/refresh_following.py deleted file mode 100644 index c57329d2..00000000 --- a/users/management/commands/refresh_following.py +++ /dev/null @@ -1,21 +0,0 @@ -from datetime import timedelta - -from django.core.management.base import BaseCommand -from django.utils import timezone -from tqdm import tqdm - -from users.models import User - - -class Command(BaseCommand): - help = "Refresh following data for all users" - - def handle(self, *args, **options): - count = 0 - for user in tqdm(User.objects.all()): - user.following = user.merged_following_ids() - if user.following: - count += 1 - user.save(update_fields=["following"]) - - print(f"{count} users updated") diff --git a/users/migrations/0001_initial.py b/users/migrations/0001_initial.py index 9e000a06..6f4dd85b 100644 --- a/users/migrations/0001_initial.py +++ b/users/migrations/0001_initial.py @@ -137,9 +137,6 @@ class Migration(migrations.Migration): ), ), ], - managers=[ - ("objects", django.contrib.auth.models.UserManager()), - ], ), migrations.CreateModel( name="Preference", diff --git a/users/migrations/0012_apidentity.py b/users/migrations/0012_apidentity.py new file mode 100644 index 00000000..8c4b4e07 --- /dev/null +++ b/users/migrations/0012_apidentity.py @@ -0,0 +1,64 @@ +# Generated by Django 4.2.4 on 2023-08-09 13:37 + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + # replaces = [ + # ("users", "0012_user_local"), + # ("users", "0013_user_identity"), + # ("users", "0014_remove_user_identity_apidentity_user"), + # ("users", "0015_alter_apidentity_user"), + # ] + + dependencies = [ + ("users", "0011_preference_hidden_categories"), + ("takahe", "0001_initial"), + ] + + operations = [ + migrations.CreateModel( + name="APIdentity", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("local", models.BooleanField()), + ("username", models.CharField(blank=True, max_length=500, null=True)), + ( + "domain_name", + models.CharField(blank=True, max_length=500, null=True), + ), + ("deleted", models.DateTimeField(blank=True, null=True)), + ( + "user", + models.OneToOneField( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + 
related_name="identity", + to=settings.AUTH_USER_MODEL, + ), + ), + ], + options={ + "indexes": [ + models.Index( + fields=["local", "username"], + name="users_apide_local_2d8170_idx", + ), + models.Index( + fields=["domain_name", "username"], + name="users_apide_domain__53ffa5_idx", + ), + ], + }, + ), + ] diff --git a/users/migrations/0013_init_identity.py b/users/migrations/0013_init_identity.py new file mode 100644 index 00000000..6488f41e --- /dev/null +++ b/users/migrations/0013_init_identity.py @@ -0,0 +1,115 @@ +# Generated by Django 4.2.4 on 2023-08-09 16:54 + +from django.conf import settings +from django.db import migrations, models, transaction +from loguru import logger +from tqdm import tqdm + +from takahe.models import Config as TakaheConfig +from takahe.models import Domain as TakaheDomain +from takahe.models import Identity as TakaheIdentity +from takahe.models import User as TakaheUser + +domain = settings.SITE_INFO["site_domain"] +name = settings.SITE_INFO["site_name"] +service_domain = settings.SITE_INFO.get("site_service_domain") + + +def init_domain(apps, schema_editor): + User = apps.get_model("users", "User") + if not User.objects.exists(): + logger.warning( + "No users found, skip domain migration (if you are running initial migration for new site, pls ignore this)" + ) + return + d = TakaheDomain.objects.filter(domain=domain).first() + if not d: + logger.info(f"Creating takahe domain {domain}") + TakaheDomain.objects.create( + domain=domain, + local=True, + service_domain=service_domain, + notes="NeoDB", + nodeinfo={}, + ) + else: + logger.info(f"Takahe domain {domain} already exists") + + TakaheConfig.objects.update_or_create( + key="public_timeline", + user=None, + identity=None, + domain=None, + defaults={"json": False}, + ) + TakaheConfig.objects.update_or_create( + key="site_name", + user=None, + identity=None, + domain=None, + defaults={"json": name}, + ) + TakaheConfig.objects.update_or_create( + key="site_name", + user=None, + identity=None, + domain_id=domain, + defaults={"json": name}, + ) + + +def init_identity(apps, schema_editor): + User = apps.get_model("users", "User") + if not User.objects.exists(): + logger.warning( + "No users found, skip identity migration (if you are running initial migration for new site, pls ignore this)" + ) + return + APIdentity = apps.get_model("users", "APIdentity") + tdomain = TakaheDomain.objects.filter(domain=domain).first() + if User.objects.filter(username__isnull=True).exists(): + raise ValueError("null username detected, aborting migration") + if TakaheUser.objects.exists(): + raise ValueError("existing Takahe users detected, aborting migration") + if TakaheIdentity.objects.exists(): + raise ValueError("existing Takahe identities detected, aborting migration") + if APIdentity.objects.exists(): + raise ValueError("existing APIdentity data detected, aborting migration") + logger.info(f"Creating takahe users/identities") + for user in tqdm(User.objects.all()): + username = user.username + handler = "@" + username + identity = APIdentity.objects.create( + pk=user.pk, + user=user, + local=True, + username=username, + domain_name=domain, + deleted=None if user.is_active else user.updated, + ) + takahe_user = TakaheUser.objects.create( + pk=user.pk, email=handler, admin=user.is_superuser + ) + takahe_identity = TakaheIdentity.objects.create( + pk=user.pk, + actor_uri=f"https://{service_domain or domain}/@{username}@{domain}/", + profile_uri=user.url, + username=username, + domain=tdomain, + name=username, + local=True, + 
discoverable=not user.preference.no_anonymous_view, + ) + takahe_identity.generate_keypair() + takahe_user.identities.add(takahe_identity) + + +class Migration(migrations.Migration): + dependencies = [ + ("users", "0012_apidentity"), + ] + + operations = [ + migrations.RunPython(init_domain), + migrations.RunPython(init_identity), + ] diff --git a/users/models/__init__.py b/users/models/__init__.py index d1e45854..14d42a2e 100644 --- a/users/models/__init__.py +++ b/users/models/__init__.py @@ -1,3 +1,4 @@ +from .apidentity import APIdentity from .preference import Preference from .report import Report from .user import User diff --git a/users/models/apidentity.py b/users/models/apidentity.py new file mode 100644 index 00000000..7d61bfc1 --- /dev/null +++ b/users/models/apidentity.py @@ -0,0 +1,278 @@ +from functools import cached_property + +from django.conf import settings +from django.db import models +from django.templatetags.static import static + +from takahe.utils import Takahe + +from .preference import Preference +from .user import User + + +class APIdentity(models.Model): + """ + An identity/actor in ActivityPub service. + + This model is used as 1:1 mapping to Takahe Identity Model + """ + + user = models.OneToOneField( + "User", models.SET_NULL, related_name="identity", null=True + ) + local = models.BooleanField() + username = models.CharField(max_length=500, blank=True, null=True) + domain_name = models.CharField(max_length=500, blank=True, null=True) + deleted = models.DateTimeField(null=True, blank=True) + + class Meta: + indexes = [ + models.Index(fields=["local", "username"]), + models.Index(fields=["domain_name", "username"]), + ] + + def __str__(self): + return f"{self.pk}:{self.username}@{self.domain_name}" + + @cached_property + def takahe_identity(self): + return Takahe.get_identity(self.pk) + + @property + def is_active(self): + return ( + self.user and self.user.is_active and self.takahe_identity.deleted is None + ) + + @property + def name(self): + return self.takahe_identity.name + + @property + def discoverable(self): + return self.takahe_identity.discoverable + + @property + def locked(self): + return self.takahe_identity.manually_approves_followers + + @property + def actor_uri(self): + return self.takahe_identity.actor_uri + + @property + def icon_uri(self): + return self.takahe_identity.icon_uri + + @property + def profile_uri(self): + return self.takahe_identity.profile_uri + + @cached_property + def display_name(self): + return self.takahe_identity.name or self.username + + @cached_property + def summary(self): + return self.takahe_identity.summary or "" + + @property + def avatar(self): + if self.local: + return ( + self.takahe_identity.icon.url + if self.takahe_identity.icon + else settings.SITE_INFO["user_icon"] + ) + else: + return f"/proxy/identity_icon/{self.pk}/" + + @property + def url(self): + return f"/users/{self.handler}/" + + @property + def preference(self): + return self.user.preference if self.user else Preference() + + @property + def full_handle(self): + return f"@{self.username}@{self.domain_name}" + + @property + def handler(self): + if self.local: + return self.username + else: + return f"@{self.username}@{self.domain_name}" + + @property + def following(self): + return Takahe.get_following_ids(self.pk) + + @property + def followers(self): + return Takahe.get_follower_ids(self.pk) + + @property + def muting(self): + return Takahe.get_muting_ids(self.pk) + + @property + def blocking(self): + return Takahe.get_blocking_ids(self.pk) + + 
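+    # The pk-list properties above come straight from takahe; each has a
+    # *_identities counterpart below resolving the pks into APIdentity
+    # querysets. An illustrative pairing, assuming an APIdentity `me`:
+    #   me.following            -> [3, 17, ...]  (takahe Identity pks)
+    #   me.following_identities -> APIdentity queryset for those pks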
@property + def following_identities(self): + return APIdentity.objects.filter(pk__in=self.following) + + @property + def follower_identities(self): + return APIdentity.objects.filter(pk__in=self.followers) + + @property + def muting_identities(self): + return APIdentity.objects.filter(pk__in=self.muting) + + @property + def blocking_identities(self): + return APIdentity.objects.filter(pk__in=self.blocking) + + @property + def requested_follower_identities(self): + return APIdentity.objects.filter(pk__in=self.requested_followers) + + @property + def follow_requesting_identities(self): + return APIdentity.objects.filter(pk__in=self.following_request) + + @property + def rejecting(self): + return Takahe.get_rejecting_ids(self.pk) + + @property + def ignoring(self): + return self.muting + self.rejecting + + def follow(self, target: "APIdentity"): + Takahe.follow(self.pk, target.pk) + + def unfollow(self, target: "APIdentity"): # this also cancels follow request + Takahe.unfollow(self.pk, target.pk) + + @property + def requested_followers(self): + return Takahe.get_requested_follower_ids(self.pk) + + @property + def following_request(self): + return Takahe.get_following_request_ids(self.pk) + + def accept_follow_request(self, target: "APIdentity"): + Takahe.accept_follow_request(target.pk, self.pk) + + def reject_follow_request(self, target: "APIdentity"): + Takahe.reject_follow_request(target.pk, self.pk) + + def block(self, target: "APIdentity"): + Takahe.block(self.pk, target.pk) + + def unblock(self, target: "APIdentity"): + Takahe.unblock(self.pk, target.pk) + + def mute(self, target: "APIdentity"): + Takahe.mute(self.pk, target.pk) + + def unmute(self, target: "APIdentity"): + Takahe.unmute(self.pk, target.pk) + + def is_rejecting(self, target: "APIdentity"): + return self != target and ( + target.is_blocked_by(self) or target.is_blocking(self) + ) + + def is_blocking(self, target: "APIdentity"): + return target.pk in self.blocking + + def is_blocked_by(self, target: "APIdentity"): + return target.is_blocking(self) + + def is_muting(self, target: "APIdentity"): + return target.pk in self.muting + + def is_following(self, target: "APIdentity"): + return target.pk in self.following + + def is_requesting(self, target: "APIdentity"): + return target.pk in self.following_request + + def is_requested(self, target: "APIdentity"): + return target.pk in self.requested_followers + + def is_followed_by(self, target: "APIdentity"): + return target.is_following(self) + + def is_visible_to_user(self, viewing_user: User): + return ( + (not viewing_user.is_authenticated) + or viewing_user == self.user + or ( + not self.is_blocking(viewing_user.identity) + and not self.is_blocked_by(viewing_user.identity) + ) + ) + + @classmethod + def get_by_handler(cls, handler: str) -> "APIdentity": + """ + Handler format + 'id' - local identity with username 'id' + 'id@site' - local identity with linked mastodon id == 'id@site' + '@id' - local identity with username 'id' + '@id@site' - remote activitypub identity 'id@site' + """ + s = handler.split("@") + l = len(s) + if l == 1 or (l == 2 and s[0] == ""): + return cls.objects.get( + username__iexact=s[0] if l == 1 else s[1], + local=True, + deleted__isnull=True, + ) + elif l == 2: + return cls.objects.get( + user__mastodon_username__iexact=s[0], + user__mastodon_site__iexact=s[1], + deleted__isnull=True, + ) + elif l == 3 and s[0] == "": + i = cls.objects.filter( + username__iexact=s[1], domain_name__iexact=s[2], deleted__isnull=True + ).first() + if i: + return i + 
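+            # not found locally: if the domain is not this site, try resolving
+            # the identity via takahe and mirror it as a remote APIdentity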
if s[2].lower() != settings.SITE_INFO["site_domain"].lower(): + identity = Takahe.get_identity_by_handler(s[1], s[2]) + if identity: + return Takahe.get_or_create_remote_apidentity(identity) + raise cls.DoesNotExist(f"Identity not exist {handler}") + else: + raise cls.DoesNotExist(f"Identity handler invalid {handler}") + + @cached_property + def activity_manager(self): + from social.models import ActivityManager + + return ActivityManager(self) + + @cached_property + def shelf_manager(self): + from journal.models import ShelfManager + + return ShelfManager(self) + + @cached_property + def tag_manager(self): + from journal.models import TagManager + + return TagManager(self) diff --git a/users/models/preference.py b/users/models/preference.py index 6cc96ef9..ac5a0b45 100644 --- a/users/models/preference.py +++ b/users/models/preference.py @@ -20,6 +20,7 @@ from common.utils import GenerateDateUUIDMediaFilePath from management.models import Announcement from mastodon.api import * +from takahe.utils import Takahe from .user import User diff --git a/users/models/report.py b/users/models/report.py index caabd49c..4a65d2a2 100644 --- a/users/models/report.py +++ b/users/models/report.py @@ -1,24 +1,9 @@ -import hashlib -import re -from functools import cached_property - from django.conf import settings -from django.contrib.auth.models import AbstractUser -from django.core import validators -from django.core.exceptions import ValidationError -from django.core.serializers.json import DjangoJSONEncoder from django.db import models -from django.db.models import F, Q, Value -from django.db.models.functions import Concat, Lower -from django.templatetags.static import static -from django.urls import reverse -from django.utils import timezone -from django.utils.deconstruct import deconstructible from django.utils.translation import gettext_lazy as _ from loguru import logger from common.utils import GenerateDateUUIDMediaFilePath -from management.models import Announcement from mastodon.api import * from .user import User diff --git a/users/models/user.py b/users/models/user.py index 7f77db0c..7cc4dc0c 100644 --- a/users/models/user.py +++ b/users/models/user.py @@ -1,14 +1,13 @@ import hashlib import re from functools import cached_property -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, ClassVar -from django.contrib.auth.models import AbstractUser +from django.contrib.auth.models import AbstractUser, BaseUserManager from django.contrib.auth.validators import UnicodeUsernameValidator -from django.core import validators from django.core.exceptions import ValidationError from django.db import models -from django.db.models import F, Q, Value +from django.db.models import F, Manager, Q, Value from django.db.models.functions import Concat, Lower from django.templatetags.static import static from django.urls import reverse @@ -19,8 +18,10 @@ from management.models import Announcement from mastodon.api import * +from takahe.utils import Takahe if TYPE_CHECKING: + from .apidentity import APIdentity from .preference import Preference _RESERVED_USERNAMES = [ @@ -47,7 +48,26 @@ def __call__(self, value): return super().__call__(value) +class UserManager(BaseUserManager): + def create_user(self, username, email, password=None): + Takahe.get_domain() # ensure configuration is complete + user = User.register(username=username, email=email) + return user + + def create_superuser(self, username, email, password=None): + from takahe.models import User as TakaheUser + + Takahe.get_domain() # ensure 
configuration is complete + user = User.register(username=username, email=email, is_superuser=True) + tu = TakaheUser.objects.get(pk=user.pk, email="@" + username) + tu.admin = True + tu.set_password(password) + tu.save() + return user + + class User(AbstractUser): + identity: "APIdentity" preference: "Preference" username_validator = UsernameValidator() username = models.CharField( @@ -111,6 +131,7 @@ class User(AbstractUser): # store the latest read announcement id, # every time user read the announcement update this field read_announcement_index = models.PositiveIntegerField(default=0) + objects: ClassVar[UserManager] = UserManager() class Meta: constraints = [ @@ -142,15 +163,6 @@ class Meta: ), ] - @staticmethod - def register(**param): - from .preference import Preference - - new_user = User(**param) - new_user.save() - Preference.objects.create(user=new_user) - return new_user - @cached_property def mastodon_acct(self): return ( @@ -174,18 +186,15 @@ def display_name(self): @property def avatar(self): - if self.mastodon_account: - return self.mastodon_account.get("avatar") or static("img/avatar.svg") - if self.email: - return ( - "https://www.gravatar.com/avatar/" - + hashlib.md5(self.email.lower().encode()).hexdigest() - ) - return static("img/avatar.svg") + return ( + self.identity.avatar if self.identity else settings.SITE_INFO["user_icon"] + ) @property def handler(self): - return self.mastodon_acct or self.username or f"~{self.pk}" + return ( + f"{self.username}" if self.username else self.mastodon_acct or f"~{self.pk}" + ) @property def url(self): @@ -195,103 +204,8 @@ def __str__(self): return f'{self.pk}:{self.username or ""}:{self.mastodon_acct}' @property - def ignoring(self): - return self.muting + self.rejecting - - def follow(self, target: "User"): - if ( - target is None - or target.locked - or self.is_following(target) - or self.is_blocking(target) - or self.is_blocked_by(target) - ): - return False - self.local_following.add(target) - self.following.append(target.pk) - self.save(update_fields=["following"]) - return True - - def unfollow(self, target: "User"): - if target and target in self.local_following.all(): - self.local_following.remove(target) - if ( - target.pk in self.following - and target.mastodon_acct not in self.mastodon_following - ): - self.following.remove(target.pk) - self.save(update_fields=["following"]) - return True - return False - - def remove_follower(self, target: "User"): - if target is None or self not in target.local_following.all(): - return False - target.local_following.remove(self) - if ( - self.pk in target.following - and self.mastodon_acct not in target.mastodon_following - ): - target.following.remove(self.pk) - target.save(update_fields=["following"]) - return True - - def block(self, target: "User"): - if target is None or target in self.local_blocking.all(): - return False - self.local_blocking.add(target) - if target.pk in self.following: - self.following.remove(target.pk) - self.save(update_fields=["following"]) - if self.pk in target.following: - target.following.remove(self.pk) - target.save(update_fields=["following"]) - if target in self.local_following.all(): - self.local_following.remove(target) - if self in target.local_following.all(): - target.local_following.remove(self) - if target.pk not in self.rejecting: - self.rejecting.append(target.pk) - self.save(update_fields=["rejecting"]) - if self.pk not in target.rejecting: - target.rejecting.append(self.pk) - target.save(update_fields=["rejecting"]) - return True - - 
def unblock(self, target: "User"): - if target and target in self.local_blocking.all(): - self.local_blocking.remove(target) - if not self.is_blocked_by(target): - if target.pk in self.rejecting: - self.rejecting.remove(target.pk) - self.save(update_fields=["rejecting"]) - if self.pk in target.rejecting: - target.rejecting.remove(self.pk) - target.save(update_fields=["rejecting"]) - return True - return False - - def mute(self, target: "User"): - if ( - target is None - or target in self.local_muting.all() - or target.mastodon_acct in self.mastodon_mutes - ): - return False - self.local_muting.add(target) - if target.pk not in self.muting: - self.muting.append(target.pk) - self.save() - return True - - def unmute(self, target: "User"): - if target and target in self.local_muting.all(): - self.local_muting.remove(target) - if target.pk in self.muting: - self.muting.remove(target.pk) - self.save() - return True - return False + def registration_complete(self): + return self.username is not None def clear(self): if self.mastodon_site == "removed" and not self.is_active: @@ -313,40 +227,13 @@ def clear(self): self.mastodon_blocks = [] self.mastodon_domain_blocks = [] self.mastodon_account = {} + self.save() + self.identity.deleted = timezone.now() + self.identity.save() - def merge_relationships(self): - self.muting = self.merged_muting_ids() - self.rejecting = self.merged_rejecting_ids() - # caculate following after rejecting is merged - self.following = self.merged_following_ids() - - @classmethod - def merge_rejected_by(cls): - """ - Caculate rejecting field to include blocked by for external users - Should be invoked after invoking merge_relationships() for all users - """ - # FIXME this is quite inifficient, should only invoked in async task - external_users = list( - cls.objects.filter(mastodon_username__isnull=False, is_active=True) - ) - reject_changed = [] - follow_changed = [] - for u in external_users: - for v in external_users: - if v.pk in u.rejecting and u.pk not in v.rejecting: - v.rejecting.append(u.pk) - if v not in reject_changed: - reject_changed.append(v) - if u.pk in v.following: - v.following.remove(u.pk) - if v not in follow_changed: - follow_changed.append(v) - for u in reject_changed: - u.save(update_fields=["rejecting"]) - for u in follow_changed: - u.save(update_fields=["following"]) - return len(follow_changed) + len(reject_changed) + def sync_relationships(self): + # FIXME + pass def refresh_mastodon_data(self): """Try refresh account data from mastodon server, return true if refreshed successfully, note it will not save to db""" @@ -390,112 +277,13 @@ def refresh_mastodon_data(self): self.mastodon_domain_blocks = get_related_acct_list( self.mastodon_site, self.mastodon_token, "/api/v1/domain_blocks" ) - self.merge_relationships() + self.sync_relationships() updated = True elif code == 401: logger.error(f"Refresh mastodon data error 401 for {self}") self.mastodon_token = "" return updated - def merged_following_ids(self): - fl = [] - for m in self.mastodon_following: - target = User.get(m) - if target and ( - (not target.mastodon_locked) - or self.mastodon_acct in target.mastodon_followers - ): - fl.append(target.pk) - for user in self.local_following.all(): - if user.pk not in fl and not user.locked and not user.is_blocking(self): - fl.append(user.pk) - fl = [x for x in fl if x not in self.rejecting] - return sorted(fl) - - def merged_muting_ids(self): - external_muting_user_ids = list( - User.objects.all() - .annotate(acct=Concat("mastodon_username", Value("@"), 
"mastodon_site")) - .filter(acct__in=self.mastodon_mutes) - .values_list("pk", flat=True) - ) - l = list( - set( - external_muting_user_ids - + list(self.local_muting.all().values_list("pk", flat=True)) - ) - ) - return sorted(l) - - def merged_rejecting_ids(self): - domain_blocked_user_ids = list( - User.objects.filter( - mastodon_site__in=self.mastodon_domain_blocks - ).values_list("pk", flat=True) - ) - external_blocking_user_ids = list( - User.objects.all() - .annotate(acct=Concat("mastodon_username", Value("@"), "mastodon_site")) - .filter(acct__in=self.mastodon_blocks) - .values_list("pk", flat=True) - ) - l = list( - set( - domain_blocked_user_ids - + external_blocking_user_ids - + list(self.local_blocking.all().values_list("pk", flat=True)) - + list(self.local_blocked_by.all().values_list("pk", flat=True)) # type: ignore - + list(self.local_muting.all().values_list("pk", flat=True)) - ) - ) - return sorted(l) - - def is_blocking(self, target): - return ( - ( - target in self.local_blocking.all() - or target.mastodon_acct in self.mastodon_blocks - or target.mastodon_site in self.mastodon_domain_blocks - ) - if target.is_authenticated - else self.preference.no_anonymous_view - ) - - def is_blocked_by(self, target): - return target.is_authenticated and target.is_blocking(self) - - def is_muting(self, target): - return target.pk in self.muting or target.mastodon_acct in self.mastodon_mutes - - def is_following(self, target): - return ( - self.mastodon_acct in target.mastodon_followers - if target.locked - else target.pk in self.following - # or target.mastodon_acct in self.mastodon_following - # or self.mastodon_acct in target.mastodon_followers - ) - - def is_followed_by(self, target): - return target.is_following(self) - - def get_mark_for_item(self, item): - params = {item.__class__.__name__.lower() + "_id": item.id, "owner": self} - mark = item.mark_class.objects.filter(**params).first() - return mark - - def get_max_visibility(self, viewer): - if not viewer.is_authenticated: - return 0 - elif viewer == self: - return 2 - elif viewer.is_blocked_by(self): - return -1 - elif viewer.is_following(self): - return 1 - else: - return 0 - @property def unread_announcements(self): unread_announcements = Announcement.objects.filter( @@ -503,59 +291,77 @@ def unread_announcements(self): ).order_by("-pk") return unread_announcements + @property + def activity_manager(self): + if not self.identity: + raise ValueError("User has no identity") + return self.identity.activity_manager + + @property + def shelf_manager(self): + if not self.identity: + raise ValueError("User has no identity") + return self.identity.shelf_manager + + @property + def tag_manager(self): + if not self.identity: + raise ValueError("User has no identity") + return self.identity.tag_manager + @classmethod def get(cls, name, case_sensitive=False): if isinstance(name, str): - sp = name.split("@") if name.startswith("~"): try: query_kwargs = {"pk": int(name[1:])} except: return None - elif len(sp) == 1: + elif name.startswith("@"): query_kwargs = { - "username__iexact" if case_sensitive else "username": name - } - elif len(sp) == 2: - query_kwargs = { - "mastodon_username__iexact" - if case_sensitive - else "mastodon_username": sp[0], - "mastodon_site__iexact" - if case_sensitive - else "mastodon_site": sp[1], + "username__iexact" if case_sensitive else "username": name[1:] } else: - return None + sp = name.split("@") + if len(sp) == 2: + query_kwargs = { + "mastodon_username__iexact" + if case_sensitive + else 
"mastodon_username": sp[0], + "mastodon_site__iexact" + if case_sensitive + else "mastodon_site": sp[1], + } + else: + return None elif isinstance(name, int): query_kwargs = {"pk": name} else: return None return User.objects.filter(**query_kwargs).first() - @property - def tags(self): - from journal.models import TagManager - - return TagManager.all_tags_for_user(self) + @classmethod + def register(cls, **param): + from .preference import Preference - @cached_property - def tag_manager(self): - from journal.models import TagManager + new_user = cls(**param) + new_user.save() + Preference.objects.create(user=new_user) + if new_user.username: # TODO make username required in registeration + new_user.initialize() + return new_user - return TagManager.get_manager_for_user(self) + def identity_linked(self): + from .apidentity import APIdentity - @cached_property - def shelf_manager(self): - from journal.models import ShelfManager + return APIdentity.objects.filter(user=self).exists() - return ShelfManager.get_manager_for_user(self) + def initialize(self): + Takahe.init_identity_for_local_user(self) + self.identity.shelf_manager - @cached_property - def activity_manager(self): - from social.models import ActivityManager - return ActivityManager.get_manager_for_user(self) +# TODO the following models should be deprecated soon class Follow(models.Model): diff --git a/users/profile.py b/users/profile.py new file mode 100644 index 00000000..994a7c81 --- /dev/null +++ b/users/profile.py @@ -0,0 +1,84 @@ +from datetime import timedelta +from typing import Any, Dict +from urllib.parse import quote + +import django_rq +from django import forms +from django.conf import settings +from django.contrib import auth, messages +from django.contrib.auth import authenticate +from django.contrib.auth.decorators import login_required +from django.core.cache import cache +from django.core.exceptions import BadRequest, ObjectDoesNotExist +from django.core.mail import send_mail +from django.core.signing import TimestampSigner +from django.core.validators import EmailValidator +from django.db.models import Count, Q +from django.http import HttpResponse, HttpResponseRedirect +from django.shortcuts import get_object_or_404, redirect, render +from django.urls import reverse +from django.utils import timezone +from django.utils.translation import gettext_lazy as _ +from loguru import logger + +from common.config import * +from common.utils import AuthedHttpRequest +from journal.exporters.doufen import export_marks_task +from journal.importers.douban import DoubanImporter +from journal.importers.goodreads import GoodreadsImporter +from journal.importers.opml import OPMLImporter +from journal.models import remove_data_by_user, reset_journal_visibility_for_user +from mastodon import mastodon_request_included +from mastodon.api import * +from mastodon.api import verify_account +from social.models import reset_social_visibility_for_user +from takahe.models import Identity as TakaheIdentity +from takahe.utils import Takahe + +from .models import Preference, User +from .tasks import * + + +class ProfileForm(forms.ModelForm): + class Meta: + model = TakaheIdentity + fields = [ + "name", + "summary", + "manually_approves_followers", + "discoverable", + "icon", + ] + + def clean_summary(self): + return Takahe.txt2html(self.cleaned_data["summary"]) + + +@login_required +def account_info(request): + profile_form = ProfileForm( + instance=request.user.identity.takahe_identity, + initial={ + "summary": 
Takahe.html2txt(request.user.identity.summary), + }, + ) + return render( + request, + "users/account.html", + { + "allow_any_site": settings.MASTODON_ALLOW_ANY_SITE, + "profile_form": profile_form, + }, + ) + + +@login_required +def account_profile(request): + if request.method == "POST": + form = ProfileForm( + request.POST, request.FILES, instance=request.user.identity.takahe_identity + ) + if form.is_valid(): + i = form.save() + Takahe.update_state(i, "edited") + return HttpResponseRedirect(reverse("users:info")) diff --git a/users/tasks.py b/users/tasks.py index 06d684a1..c6cb4c8a 100644 --- a/users/tasks.py +++ b/users/tasks.py @@ -42,6 +42,4 @@ def refresh_all_mastodon_data_task(ttl_hours): else: logger.warning(f"Missing token for {user}") logger.info(f"{count} users updated") - c = User.merge_rejected_by() - logger.info(f"{c} users's rejecting list updated") logger.info(f"Mastodon data refresh done") diff --git a/users/templates/users/account.html b/users/templates/users/account.html index d600fc40..a960bb47 100644 --- a/users/templates/users/account.html +++ b/users/templates/users/account.html @@ -19,7 +19,7 @@

{% if allow_any_site %}
-
+
{% trans '用户名、电子邮件与社交身份' %} @@ -91,28 +91,50 @@
{% endif %} -
+
+
+ 昵称、头像与其它个人信息 + + {% include "_field.html" with field=profile_form.name %} + {% include "_field.html" with field=profile_form.summary %} + {% include "_field.html" with field=profile_form.icon %} + {% include "_field.html" with field=profile_form.discoverable %} + {% include "_field.html" with field=profile_form.manually_approves_followers %} + {% csrf_token %} + + +
+
+
{% trans '正在关注的用户' %} - {% include 'users/relationship_list.html' with name="关注" id="follow" list=request.user.local_following.all %} + {% include 'users/relationship_list.html' with name="关注" id="follow" list=request.user.identity.following_identities.all %}
-
+
{% trans '关注了你的用户' %} - {% include 'users/relationship_list.html' with name="关注者" id="follower" list=request.user.local_followers.all %} + {% include 'users/relationship_list.html' with name="关注者" id="follower" list=request.user.identity.follower_identities.all %} +
+
+
+
+ {% trans '请求关注你的用户' %} + {% include 'users/relationship_list.html' with name="请求关注者" id="follow_request" list=request.user.identity.requested_follower_identities.all %}
{% trans '已隐藏的用户' %} - {% include 'users/relationship_list.html' with name="隐藏" id="mute" list=request.user.local_muting.all %} + {% include 'users/relationship_list.html' with name="隐藏" id="mute" list=request.user.identity.muting_identities.all %}
{% trans '已屏蔽的用户' %} - {% include 'users/relationship_list.html' with name="屏蔽" id="block" list=request.user.local_blocking.all %} + {% include 'users/relationship_list.html' with name="屏蔽" id="block" list=request.user.identity.blocking_identities.all %}
@@ -126,7 +148,7 @@ value="{% trans '同步' %}" {% if not request.user.mastodon_username %}disabled{% endif %} /> - {% if user.mastodon_last_refresh %}上次更新时间 {{ user.mastodon_last_refresh }}{% endif %} + {% if request.user.mastodon_last_refresh %}上次更新时间 {{ request.user.mastodon_last_refresh }}{% endif %}
为了正确高效的展示短评和评论,{{ site_name }}会缓存你在联邦宇宙的关注、屏蔽和隐藏列表。如果你刚刚更新过帐户的上锁状态、增减过关注、隐藏或屏蔽,希望立即生效,可以点击这里立刻更新;这类信息也会每天自动同步。 @@ -167,7 +189,7 @@
- {% include "_sidebar.html" with show_profile=1 %} + {% include "_sidebar.html" with show_profile=1 identity=request.user.identity %}
{% include "_footer.html" %} diff --git a/users/templates/users/data.html b/users/templates/users/data.html index 27390b53..1844399e 100644 --- a/users/templates/users/data.html +++ b/users/templates/users/data.html @@ -177,7 +177,7 @@
- {% include "_sidebar.html" with show_profile=1 %} + {% include "_sidebar.html" with show_profile=1 identity=request.user.identity %} {% include "_footer.html" %} diff --git a/users/templates/users/fetch_identity_failed.html b/users/templates/users/fetch_identity_failed.html new file mode 100644 index 00000000..2e9a64e4 --- /dev/null +++ b/users/templates/users/fetch_identity_failed.html @@ -0,0 +1,4 @@ +

+ + 无法找到用户,请确认拼写正确;也可能服务器正忙,请稍后再尝试。 +

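NOTE (editor): fetch_identity_failed.html above is the terminal state of a small polling loop: fetch_identity_pending.html (next file) starts it, fetch_identity_refresh.html re-arms it, and the fetch_refresh view added to users/views.py at the end of this patch drives it. A standalone sketch of the back-off schedule that view implements (illustrative only, not code from the patch):

    # Each poll passes `retry` back as a query parameter and waits
    # retry * 2 seconds before asking again; after ten attempts it gives up.
    def next_poll(retry: int) -> "tuple[int, int] | None":
        retry += 1
        if retry > 10:
            return None  # give up: fetch_identity_failed.html is rendered
        return retry, retry * 2  # re-poll via fetch_identity_refresh.html

    # Delays run 2s, 4s, ... 20s, about 110 seconds before giving up.
    assert next_poll(0) == (1, 2) and next_poll(10) is None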
diff --git a/users/templates/users/fetch_identity_pending.html b/users/templates/users/fetch_identity_pending.html new file mode 100644 index 00000000..3b41e5fb --- /dev/null +++ b/users/templates/users/fetch_identity_pending.html @@ -0,0 +1,42 @@ +{% load static %} +{% load i18n %} +{% load l10n %} +{% load humanize %} +{% load admin_url %} +{% load mastodon %} +{% load oauth_token %} +{% load truncate %} +{% load highlight %} +{% load thumb %} + + + + + + {{ site_name }} - {% trans '查询用户' %} + {% include "common_libs.html" with jquery=0 v2=1 %} + + + {% include '_header.html' %} +
+
+
+ {% if handle %} +
正在从联邦网络查询{{ handle }}
+
+ +
+ {% else %} +
获取系统繁忙,请稍等几秒钟再搜索
+ {% endif %} +
+
+ +
+ {% include '_footer.html' %} + + diff --git a/users/templates/users/fetch_identity_refresh.html b/users/templates/users/fetch_identity_refresh.html new file mode 100644 index 00000000..635c1c29 --- /dev/null +++ b/users/templates/users/fetch_identity_refresh.html @@ -0,0 +1,5 @@ +
+ +
diff --git a/users/templates/users/follow_locked.html b/users/templates/users/follow_locked.html index c96f9db7..557cc756 100644 --- a/users/templates/users/follow_locked.html +++ b/users/templates/users/follow_locked.html @@ -9,10 +9,10 @@ 在联邦宇宙关注用户
-

{{ user.display_name | default:user.mastodon_acct }} 已经开启了关注审核,请复制以下ID,到你所在的联邦宇宙实例中去关注ta。

+

{{ identity.display_name }} 已经开启了关注审核,请复制以下ID,到你所在的联邦宇宙实例中去关注ta。

@{{ user.mastodon_acct }} + data-tooltip="点击复制">@{{ identity.user.mastodon_acct }}

如果你已经关注了ta,请耐心等待ta的审核。

{% if not request.user.mastodon_acct %} diff --git a/users/templates/users/home_anonymous.html b/users/templates/users/home_anonymous.html index 5fb098ee..5f20b9f9 100644 --- a/users/templates/users/home_anonymous.html +++ b/users/templates/users/home_anonymous.html @@ -6,21 +6,23 @@ - {{ site_name }} - {{ user.display_name }} + {{ site_name }} - {{ identity.handler }} + content="{{ site_name }} - {{ identity.handler }}的主页"> - {% if user.mastodon_account.url %} - Mastodon verification + {% if identity.user.mastodon_account.url %} + Mastodon verification {% endif %} diff --git a/users/templates/users/login.html b/users/templates/users/login.html index caa914b8..ad737738 100644 --- a/users/templates/users/login.html +++ b/users/templates/users/login.html @@ -44,10 +44,10 @@
- +
- {% if user.is_authenticated %} + {% if request.user.is_authenticated %} {% trans '前往首页' %} {% else %}
@@ -132,13 +132,22 @@ }); {% else %} - + {% for site in sites %}{% endfor %} {% endif %} + {% if invite_status %} + + {% if invite_status == 1 %} + 邀请链接有效,可注册新用户 + {% elif invite_status == -1 %} + 本站目前为邀请注册,已有账户可直接登入,新用户请使用有效邀请链接注册 + {% elif invite_status == -2 %} + 邀请链接无效,已有账户可直接登入,新用户请使用有效邀请链接注册 + {% endif %} + + {% endif %}
{% endif %}
部分模块加载超时,请检查网络(翻墙)设置。
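NOTE (editor): login.html now renders a three-state invite_status flag: 1 for a valid invite link, -1 for invite-only mode without a link, -2 for an invalid link. The view that computes it is not part of this patch; the sketch below is a hypothetical restatement of that contract (get_invite_status, its arguments, and is_valid are assumed names, not repository code):

    from typing import Callable, Optional

    def get_invite_status(
        token: Optional[str], invite_only: bool, is_valid: Callable[[str], bool]
    ) -> Optional[int]:
        if not invite_only:
            return None  # the template skips the invite block entirely
        if not token:
            return -1  # existing users may log in; new users need an invite link
        return 1 if is_valid(token) else -2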
diff --git a/users/templates/users/preferences.html b/users/templates/users/preferences.html index 01619859..ca8c30a0 100644 --- a/users/templates/users/preferences.html +++ b/users/templates/users/preferences.html @@ -159,7 +159,7 @@
自定义样式代码 (实验功能)
- {% include "_sidebar.html" with show_profile=1 %} + {% include "_sidebar.html" with show_profile=1 identity=request.user.identity %} {% include "_footer.html" %} diff --git a/users/templates/users/profile_actions.html b/users/templates/users/profile_actions.html index 206a71d7..d01a63d6 100644 --- a/users/templates/users/profile_actions.html +++ b/users/templates/users/profile_actions.html @@ -1,10 +1,10 @@ {% load mastodon %} -{% current_user_relationship user as relationship %} +{% current_user_relationship identity as relationship %} {% if relationship.rejecting %} 已屏蔽 @@ -18,80 +18,108 @@ {% endif %} {% if show_home %} - + {% endif %} + {% if not identity.local %} + + + + + + {% elif identity.user and identity.user.mastodon_account %} + + + + + + {% endif %} + {% if relationship.requested %} + + + + + + + + + + + {% endif %} {% if relationship.following %} - {% if relationship.unfollowable %} - - - - - - {% else %} - - {% endif %} + + + + + + {% elif relationship.requesting %} + + + + + {% else %} - {% if user.locked %} - - - - - - {% else %} - - - - - - {% endif %} + + + + + {% endif %} {% if not relationship.muting %} - + - {% elif relationship.unmutable %} + {% else %} - - - - {% else %} - - - + {% endif %} - {% comment %} {% trans '投诉用户' %} {% endcomment %} + {% comment %} {% trans '投诉用户' %} {% endcomment %} {% endif %} diff --git a/users/templates/users/register.html b/users/templates/users/register.html index 7419f69f..d1329ef0 100644 --- a/users/templates/users/register.html +++ b/users/templates/users/register.html @@ -12,7 +12,7 @@
- +
{% if request.session.new_user %}

欢迎来到{{ site_name }},{{ request.user.mastodon_acct }}!

diff --git a/users/templates/users/relationship_list.html b/users/templates/users/relationship_list.html index 1907af75..1c0b1c4c 100644 --- a/users/templates/users/relationship_list.html +++ b/users/templates/users/relationship_list.html @@ -1,10 +1,10 @@ -{% for user in list %} +{% for identity in list %}

{% include 'users/profile_actions.html' with show_home=1 %} {{ user.handler }} + onclick="navigator.clipboard.writeText(this.innerText);$(this).data('tooltip','copied');">{{ identity.handler }}

{% empty %}

无数据

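NOTE (editor): relationship_list.html now iterates APIdentity rows instead of User rows; account.html feeds it from properties such as request.user.identity.following_identities. Those properties are defined outside this patch. Assuming they mirror the pk lists asserted in users/tests.py below (e.g. identity.following == [pk, ...]), each plausibly reduces to a queryset filter like this sketch (identities_from_pks is an illustrative name, not repository code):

    from django.db.models import QuerySet

    def identities_from_pks(pks: "list[int]") -> QuerySet:
        from users.models import APIdentity  # exported by users.models in this patch

        return APIdentity.objects.filter(pk__in=pks)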
diff --git a/users/templates/users/verify_email.html b/users/templates/users/verify_email.html index bc1e6284..c7877428 100644 --- a/users/templates/users/verify_email.html +++ b/users/templates/users/verify_email.html @@ -12,12 +12,12 @@
- +

验证电子邮件

{% if success %}

- {{ user.email }} 验证成功,点击这里返回首页。 + {{ request.user.email }} 验证成功,点击这里返回首页

{% else %}

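NOTE (editor): the rewritten users/tests.py below declares databases = "__all__" (the suite now spans the NeoDB and Takahe databases) and calls Takahe._force_state_cycle() between acting and asserting, because follows, mutes and blocks are processed asynchronously by Takahe's state machine. Its test_handle also pins down the handle grammar; the helper below condenses the lookup order of the rewritten User.get() from earlier in this patch (parse_handle is an illustrative name; APIdentity.get_by_handler, per the assertions below, additionally resolves bare usernames and full @user@domain handles):

    def parse_handle(name: str) -> "dict | None":
        """Condensed from User.get() as rewritten above (case-insensitive branch)."""
        if name.startswith("~"):  # "~123": local numeric pk
            return {"pk": int(name[1:])} if name[1:].isdigit() else None
        if name.startswith("@"):  # "@alice": local username
            return {"username": name[1:]}
        sp = name.split("@")  # "alice@example.social": external account
        if len(sp) == 2:
            return {"mastodon_username": sp[0], "mastodon_site": sp[1]}
        return None  # anything else does not resolve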
diff --git a/users/tests.py b/users/tests.py
index 3e801a29..3e0a555e 100644
--- a/users/tests.py
+++ b/users/tests.py
@@ -1,168 +1,91 @@
+from django.conf import settings
 from django.test import TestCase
 
+from takahe.utils import Takahe
+
 from .models import *
-from .models.user import Block, Follow, Mute
 
 
 class UserTest(TestCase):
-    def setUp(self):
-        self.alice = User.register(mastodon_site="MySpace", mastodon_username="Alice")
-        self.bob = User.register(mastodon_site="KKCity", mastodon_username="Bob")
-
-    def test_local_follow(self):
-        self.assertTrue(self.alice.follow(self.bob))
-        self.assertTrue(
-            Follow.objects.filter(owner=self.alice, target=self.bob).exists()
-        )
-        self.assertEqual(self.alice.merged_following_ids(), [self.bob.pk])
-        self.assertEqual(self.alice.following, [self.bob.pk])
-        self.assertTrue(self.alice.is_following(self.bob))
-        self.assertTrue(self.bob.is_followed_by(self.alice))
+    databases = "__all__"
 
-        self.assertFalse(self.alice.follow(self.bob))
-        self.assertEqual(
-            Follow.objects.filter(owner=self.alice, target=self.bob).count(), 1
+    def setUp(self):
+        self.alice = User.register(
+            mastodon_site="MySpace", mastodon_username="Alice", username="alice"
+        ).identity
+        self.bob = User.register(
+            mastodon_site="KKCity", mastodon_username="Bob", username="bob"
+        ).identity
+        self.domain = settings.SITE_INFO.get("site_domain")
+
+    def test_handle(self):
+        self.assertEqual(APIdentity.get_by_handler("Alice"), self.alice)
+        self.assertEqual(APIdentity.get_by_handler("@alice"), self.alice)
+        self.assertEqual(APIdentity.get_by_handler("Alice@MySpace"), self.alice)
+        self.assertEqual(APIdentity.get_by_handler("alice@myspace"), self.alice)
+        self.assertEqual(APIdentity.get_by_handler("@alice@" + self.domain), self.alice)
+        self.assertEqual(APIdentity.get_by_handler("@Alice@" + self.domain), self.alice)
+        self.assertRaises(
+            APIdentity.DoesNotExist, APIdentity.get_by_handler, "@Alice@MySpace"
         )
-        self.assertEqual(self.alice.following, [self.bob.pk])
-
-        self.assertTrue(self.alice.unfollow(self.bob))
-        self.assertFalse(
-            Follow.objects.filter(owner=self.alice, target=self.bob).exists()
+        self.assertRaises(
+            APIdentity.DoesNotExist, APIdentity.get_by_handler, "@alice@KKCity"
         )
-        self.assertFalse(self.alice.is_following(self.bob))
-        self.assertFalse(self.bob.is_followed_by(self.alice))
-        self.assertEqual(self.alice.following, [])
 
-    def test_locked(self):
-        self.bob.mastodon_locked = True
-        self.bob.save()
-        self.assertFalse(self.alice.follow(self.bob))
-        self.bob.mastodon_locked = False
-        self.bob.save()
-        self.assertTrue(self.alice.follow(self.bob))
-        self.assertTrue(self.alice.is_following(self.bob))
-        self.bob.mastodon_locked = True
-        self.bob.save()
-        self.assertFalse(self.alice.is_following(self.bob))
+    def test_fetch(self):
+        pass
 
-    def test_external_follow(self):
-        self.alice.mastodon_following.append(self.bob.mastodon_acct)
-        self.alice.merge_relationships()
-        self.alice.save()
+    def test_follow(self):
+        self.alice.follow(self.bob)
+        Takahe._force_state_cycle()
         self.assertTrue(self.alice.is_following(self.bob))
+        self.assertTrue(self.bob.is_followed_by(self.alice))
         self.assertEqual(self.alice.following, [self.bob.pk])
-        self.assertFalse(self.alice.follow(self.bob))
+        self.assertEqual(self.bob.followers, [self.alice.pk])
 
-        self.alice.mastodon_following.remove(self.bob.mastodon_acct)
-        self.alice.merge_relationships()
-        self.alice.save()
+        self.alice.unfollow(self.bob)
+        Takahe._force_state_cycle()
         self.assertFalse(self.alice.is_following(self.bob))
+        self.assertFalse(self.bob.is_followed_by(self.alice))
         self.assertEqual(self.alice.following, [])
-        self.assertTrue(self.alice.follow(self.bob))
-        self.assertTrue(self.alice.is_following(self.bob))
+        self.assertEqual(self.bob.followers, [])
 
-    def test_local_mute(self):
+    def test_mute(self):
         self.alice.mute(self.bob)
-        self.assertTrue(Mute.objects.filter(owner=self.alice, target=self.bob).exists())
-        self.assertEqual(self.alice.merged_muting_ids(), [self.bob.pk])
-        self.assertEqual(self.alice.ignoring, [self.bob.pk])
+        Takahe._force_state_cycle()
         self.assertTrue(self.alice.is_muting(self.bob))
-
-        self.alice.mute(self.bob)
-        self.assertEqual(
-            Mute.objects.filter(owner=self.alice, target=self.bob).count(), 1
-        )
         self.assertEqual(self.alice.ignoring, [self.bob.pk])
+        self.assertEqual(self.alice.rejecting, [])
 
-        self.alice.unmute(self.bob)
-        self.assertFalse(
-            Mute.objects.filter(owner=self.alice, target=self.bob).exists()
-        )
-        self.assertFalse(self.alice.is_muting(self.bob))
-        self.assertEqual(self.alice.ignoring, [])
-        self.assertEqual(self.alice.merged_muting_ids(), [])
-
-    def test_external_mute(self):
-        self.alice.mastodon_mutes.append(self.bob.mastodon_acct)
-        self.alice.save()
-        self.assertTrue(self.alice.is_muting(self.bob))
-        self.assertEqual(self.alice.merged_muting_ids(), [self.bob.pk])
-
-        self.alice.mastodon_mutes.remove(self.bob.mastodon_acct)
-        self.assertFalse(self.alice.is_muting(self.bob))
-        self.assertEqual(self.alice.merged_muting_ids(), [])
-
-    def test_local_block_follow(self):
-        self.alice.block(self.bob)
-        self.assertEqual(self.bob.follow(self.alice), False)
-        self.alice.unblock(self.bob)
-        self.assertEqual(self.bob.follow(self.alice), True)
-        self.assertEqual(self.bob.following, [self.alice.pk])
+    def test_block(self):
         self.alice.block(self.bob)
-        self.assertEqual(self.bob.following, [])
-
-    def test_local_block(self):
-        self.alice.block(self.bob)
-        self.assertTrue(
-            Block.objects.filter(owner=self.alice, target=self.bob).exists()
-        )
-        self.assertEqual(self.alice.merged_rejecting_ids(), [self.bob.pk])
-        self.assertEqual(self.alice.ignoring, [self.bob.pk])
+        Takahe._force_state_cycle()
         self.assertTrue(self.alice.is_blocking(self.bob))
         self.assertTrue(self.bob.is_blocked_by(self.alice))
-
-        self.alice.block(self.bob)
-        self.assertEqual(
-            Block.objects.filter(owner=self.alice, target=self.bob).count(), 1
-        )
+        self.assertEqual(self.alice.rejecting, [self.bob.pk])
         self.assertEqual(self.alice.ignoring, [self.bob.pk])
 
         self.alice.unblock(self.bob)
-        self.assertFalse(
-            Block.objects.filter(owner=self.alice, target=self.bob).exists()
-        )
+        Takahe._force_state_cycle()
         self.assertFalse(self.alice.is_blocking(self.bob))
         self.assertFalse(self.bob.is_blocked_by(self.alice))
+        self.assertEqual(self.alice.rejecting, [])
         self.assertEqual(self.alice.ignoring, [])
-        self.assertEqual(self.alice.merged_rejecting_ids(), [])
 
-    def test_external_block(self):
-        self.bob.follow(self.alice)
-        self.assertEqual(self.bob.following, [self.alice.pk])
-        self.alice.mastodon_blocks.append(self.bob.mastodon_acct)
-        self.alice.save()
-        self.assertTrue(self.alice.is_blocking(self.bob))
-        self.assertTrue(self.bob.is_blocked_by(self.alice))
-        self.assertEqual(self.alice.merged_rejecting_ids(), [self.bob.pk])
-        self.alice.merge_relationships()
-        self.assertEqual(self.alice.rejecting, [self.bob.pk])
-        self.alice.save()
-        self.assertEqual(self.bob.following, [self.alice.pk])
-        self.assertEqual(self.bob.rejecting, [])
-        self.assertEqual(User.merge_rejected_by(), 2)
-        self.bob.refresh_from_db()
-        self.assertEqual(self.bob.rejecting, [self.alice.pk])
-        self.assertEqual(self.bob.following, [])
-
-        self.alice.mastodon_blocks.remove(self.bob.mastodon_acct)
-        self.assertFalse(self.alice.is_blocking(self.bob))
-        self.assertFalse(self.bob.is_blocked_by(self.alice))
-        self.assertEqual(self.alice.merged_rejecting_ids(), [])
-
-    def test_external_domain_block(self):
-        self.alice.mastodon_domain_blocks.append(self.bob.mastodon_site)
-        self.alice.save()
-        self.assertTrue(self.alice.is_blocking(self.bob))
-        self.assertTrue(self.bob.is_blocked_by(self.alice))
-        self.assertEqual(self.alice.merged_rejecting_ids(), [self.bob.pk])
-        self.alice.merge_relationships()
-        self.assertEqual(self.alice.rejecting, [self.bob.pk])
-        self.alice.save()
-        self.assertEqual(User.merge_rejected_by(), 1)
-        self.bob.refresh_from_db()
-        self.assertEqual(self.bob.rejecting, [self.alice.pk])
-
-        self.alice.mastodon_domain_blocks.remove(self.bob.mastodon_site)
-        self.assertFalse(self.alice.is_blocking(self.bob))
-        self.assertFalse(self.bob.is_blocked_by(self.alice))
-        self.assertEqual(self.alice.merged_rejecting_ids(), [])
+    # def test_external_domain_block(self):
+    #     self.alice.mastodon_domain_blocks.append(self.bob.mastodon_site)
+    #     self.alice.save()
+    #     self.assertTrue(self.alice.is_blocking(self.bob))
+    #     self.assertTrue(self.bob.is_blocked_by(self.alice))
+    #     self.assertEqual(self.alice.merged_rejecting_ids(), [self.bob.pk])
+    #     self.alice.merge_relationships()
+    #     self.assertEqual(self.alice.rejecting, [self.bob.pk])
+    #     self.alice.save()
+    #     self.assertEqual(User.merge_rejected_by(), 1)
+    #     self.bob.refresh_from_db()
+    #     self.assertEqual(self.bob.rejecting, [self.alice.pk])

+    #     self.alice.mastodon_domain_blocks.remove(self.bob.mastodon_site)
+    #     self.assertFalse(self.alice.is_blocking(self.bob))
+    #     self.assertFalse(self.bob.is_blocked_by(self.alice))
+    #     self.assertEqual(self.alice.merged_rejecting_ids(), [])
diff --git a/users/urls.py b/users/urls.py
index c634650c..f1ca1753 100644
--- a/users/urls.py
+++ b/users/urls.py
@@ -12,8 +12,10 @@
     path("register", register, name="register"),
     path("connect", connect, name="connect"),
     path("reconnect", reconnect, name="reconnect"),
+    path("fetch_refresh", fetch_refresh, name="fetch_refresh"),
     path("data", data, name="data"),
     path("info", account_info, name="info"),
+    path("profile", account_profile, name="profile"),
     path("data/import/status", data_import_status, name="import_status"),
     path("data/import/goodreads", import_goodreads, name="import_goodreads"),
     path("data/import/douban", import_douban, name="import_douban"),
@@ -26,9 +28,18 @@
     path("preferences", preferences, name="preferences"),
     path("logout", logout, name="logout"),
     path("layout", set_layout, name="set_layout"),
-    path("locked/<str:user_name>", follow_locked, name="locked"),
     path("follow/<str:user_name>", follow, name="follow"),
     path("unfollow/<str:user_name>", unfollow, name="unfollow"),
+    path(
+        "accept_follow_request/<str:user_name>",
+        accept_follow_request,
+        name="accept_follow_request",
+    ),
+    path(
+        "reject_follow_request/<str:user_name>",
+        reject_follow_request,
+        name="reject_follow_request",
+    ),
     path("mute/<str:user_name>", mute, name="mute"),
     path("unmute/<str:user_name>", unmute, name="unmute"),
     path("block/<str:user_name>", block, name="block"),
diff --git a/users/views.py b/users/views.py
index 62282203..f82b961d 100644
--- a/users/views.py
+++ b/users/views.py
@@ -9,18 +9,25 @@
 from django.utils.translation import gettext_lazy as _
 
 from common.config import *
+from common.utils import (
+    AuthedHttpRequest,
+    HTTPResponseHXRedirect,
+    target_identity_required,
+)
 from management.models import Announcement
 from mastodon.api import *
+from takahe.utils import Takahe
 
 from .account import *
 from .data import *
 from .forms import ReportForm
-from .models import Preference, Report, User
+from .models import APIdentity, Preference, Report, User
+from .profile import account_info, account_profile
 
 
-def render_user_not_found(request):
+def render_user_not_found(request, user_name=""):
     sec_msg = _("😖哎呀,这位用户好像还没有加入本站,快去联邦宇宙呼唤TA来注册吧!")
-    msg = _("未找到该用户")
+    msg = _("未找到用户") + user_name
     return render(
         request,
         "common/error.html",
@@ -42,82 +49,145 @@ def render_user_blocked(request):
     )
 
 
+def query_identity(request, handle):
+    try:
+        i = APIdentity.get_by_handler(handle)
+        return redirect(i.url)
+    except APIdentity.DoesNotExist:
+        if len(handle.split("@")) == 3:
+            Takahe.fetch_remote_identity(handle)
+            return render(
+                request, "users/fetch_identity_pending.html", {"handle": handle}
+            )
+        else:
+            return render_user_not_found(request, handle)
+
+
+def fetch_refresh(request):
+    handle = request.GET.get("handle", "")
+    try:
+        i = APIdentity.get_by_handler(handle)
+        return HTTPResponseHXRedirect(i.url)
+    except:
+        retry = int(request.GET.get("retry", 0)) + 1
+        if retry > 10:
+            return render(request, "users/fetch_identity_failed.html")
+        else:
+            return render(
+                request,
+                "users/fetch_identity_refresh.html",
+                {"handle": handle, "retry": retry, "delay": retry * 2},
+            )
+
+
 @login_required
-def follow(request, user_name):
+@target_identity_required
+def follow(request: AuthedHttpRequest, user_name):
     if request.method != "POST":
         raise BadRequest()
-    user = User.get(user_name)
-    if request.user.follow(user):
-        return render(request, "users/profile_actions.html", context={"user": user})
-    else:
-        raise BadRequest()
+    request.user.identity.follow(request.target_identity)
+    return render(
+        request,
+        "users/profile_actions.html",
+        context={"identity": request.target_identity},
+    )
 
 
 @login_required
-def unfollow(request, user_name):
+@target_identity_required
+def unfollow(request: AuthedHttpRequest, user_name):
     if request.method != "POST":
         raise BadRequest()
-    user = User.get(user_name)
-    if request.user.unfollow(user):
-        return render(request, "users/profile_actions.html", context={"user": user})
-    else:
-        raise BadRequest()
+    request.user.identity.unfollow(request.target_identity)
+    return render(
+        request,
+        "users/profile_actions.html",
+        context={"identity": request.target_identity},
+    )
 
 
 @login_required
-def mute(request, user_name):
+@target_identity_required
+def mute(request: AuthedHttpRequest, user_name):
     if request.method != "POST":
         raise BadRequest()
-    user = User.get(user_name)
-    if request.user.mute(user):
-        return render(request, "users/profile_actions.html", context={"user": user})
-    else:
-        raise BadRequest()
+    request.user.identity.mute(request.target_identity)
+    return render(
+        request,
+        "users/profile_actions.html",
+        context={"identity": request.target_identity},
+    )
 
 
 @login_required
-def unmute(request, user_name):
+@target_identity_required
+def unmute(request: AuthedHttpRequest, user_name):
     if request.method != "POST":
         raise BadRequest()
-    user = User.get(user_name)
-    if request.user.unmute(user):
-        return render(request, "users/profile_actions.html", context={"user": user})
-    else:
-        raise BadRequest()
+    request.user.identity.unmute(request.target_identity)
+    return render(
+        request,
+        "users/profile_actions.html",
+        context={"identity": request.target_identity},
+    )
 
 
 @login_required
-def block(request, user_name):
+@target_identity_required
+def block(request: AuthedHttpRequest, user_name):
     if request.method != "POST":
         raise BadRequest()
-    user = User.get(user_name)
-    if request.user.block(user):
-        return render(request, "users/profile_actions.html", context={"user": user})
-    else:
-        raise BadRequest()
+    request.user.identity.block(request.target_identity)
+    return render(
+        request,
+        "users/profile_actions.html",
+        context={"identity": request.target_identity},
+    )
 
 
 @login_required
-def unblock(request, user_name):
+@target_identity_required
+def unblock(request: AuthedHttpRequest, user_name):
     if request.method != "POST":
         raise BadRequest()
-    user = User.get(user_name)
-    if request.user.unblock(user):
-        return render(request, "users/profile_actions.html", context={"user": user})
-    else:
+    request.user.identity.unblock(request.target_identity)
+    return render(
+        request,
+        "users/profile_actions.html",
+        context={"identity": request.target_identity},
+    )
+
+
+@login_required
+@target_identity_required
+def accept_follow_request(request: AuthedHttpRequest, user_name):
+    if request.method != "POST":
         raise BadRequest()
+    request.user.identity.accept_follow_request(request.target_identity)
+    return render(
+        request,
+        "users/profile_actions.html",
+        context={"identity": request.target_identity},
+    )
 
 
 @login_required
-def follow_locked(request, user_name):
-    user = User.get(user_name)
-    return render(request, "users/follow_locked.html", context={"user": user})
+@target_identity_required
+def reject_follow_request(request: AuthedHttpRequest, user_name):
+    if request.method != "POST":
+        raise BadRequest()
+    request.user.identity.reject_follow_request(request.target_identity)
+    return render(
+        request,
+        "users/profile_actions.html",
+        context={"identity": request.target_identity},
+    )
 
 
 @login_required
-def set_layout(request):
+def set_layout(request: AuthedHttpRequest):
     if request.method == "POST":
-        layout = json.loads(request.POST.get("layout"))
+        layout = json.loads(request.POST.get("layout", "{}"))
         if request.POST.get("name") == "profile":
             request.user.preference.profile_layout = layout
             request.user.preference.save(update_fields=["profile_layout"])
@@ -130,7 +200,7 @@ def set_layout(request):
 
 
 @login_required
-def report(request):
+def report(request: AuthedHttpRequest):
     if request.method == "GET":
         user_id = request.GET.get("user_id")
         if user_id:
@@ -171,7 +241,7 @@ def report(request):
 
 
 @login_required
-def manage_report(request):
+def manage_report(request: AuthedHttpRequest):
     if not request.user.is_staff:
         raise PermissionDenied()
     if request.method == "GET":
@@ -191,7 +261,7 @@ def manage_report(request):
 
 
 @login_required
-def mark_announcements_read(request):
+def mark_announcements_read(request: AuthedHttpRequest):
     if request.method == "POST":
         try:
             request.user.read_announcement_index = Announcement.objects.latest("pk").pk
@@ -199,4 +269,4 @@ def mark_announcements_read(request):
         except ObjectDoesNotExist:  # when there is no announcement
             pass
-    return HttpResponseRedirect(request.META.get("HTTP_REFERER"))
+    return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/"))
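NOTE (editor): every relationship view above now delegates handle resolution to the target_identity_required decorator imported from common.utils, which is not part of this patch. A hypothetical sketch of its contract, assuming the APIdentity.get_by_handler semantics exercised in users/tests.py:

    from functools import wraps

    from django.http import Http404

    def target_identity_required(view):
        @wraps(view)
        def wrapped(request, user_name, *args, **kwargs):
            from users.models import APIdentity  # real decorator lives in common/utils.py

            try:
                request.target_identity = APIdentity.get_by_handler(user_name)
            except APIdentity.DoesNotExist:
                raise Http404(user_name)
            return view(request, user_name, *args, **kwargs)

        return wrapped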