diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..b7c2055 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,8 @@ +.editorconfig +logs +.venv +start +config.yaml +registration.yaml +*.db +*.pickle diff --git a/.editorconfig b/.editorconfig index 5fb877e..a442021 100644 --- a/.editorconfig +++ b/.editorconfig @@ -1,20 +1,15 @@ -# EditorConfig is awesome: https://EditorConfig.org - -# top-most EditorConfig file root = true -# Unix-style newlines with a newline ending every file [*] +indent_style = tab +indent_size = 4 end_of_line = lf -insert_final_newline = true charset = utf-8 -indent_style = space -indent_size = 4 - -[*.py] -max_line_length = 99 +trim_trailing_whitespace = true +insert_final_newline = true -# Indentation override for all JSON/YAML files -[*.{json,yaml,yml}] +[*.{yaml,yml,sql}] indent_style = space -indent_size = 2 + +[{.gitlab-ci.yml,.github/workflows/*.yml}] +indent_size = 2 \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/bug.md b/.github/ISSUE_TEMPLATE/bug.md new file mode 100644 index 0000000..18862a5 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug.md @@ -0,0 +1,14 @@ +--- +name: Bug report +about: If something is definitely wrong in the bridge (rather than just a setup issue), + file a bug report. Remember to include relevant logs. +labels: bug + +--- + + diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000..b387ee8 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,7 @@ +contact_links: + - name: Troubleshooting docs & FAQ + url: https://docs.mau.fi/bridges/general/troubleshooting.html + about: Check this first if you're having problems setting up the bridge. + - name: Support room + url: https://matrix.to/#/#linkedin-matrix:nevarro.space + about: For setup issues not answered by the troubleshooting docs, ask in the Matrix room. 
diff --git a/.github/ISSUE_TEMPLATE/enhancement.md b/.github/ISSUE_TEMPLATE/enhancement.md new file mode 100644 index 0000000..264e67f --- /dev/null +++ b/.github/ISSUE_TEMPLATE/enhancement.md @@ -0,0 +1,6 @@ +--- +name: Enhancement request +about: Submit a feature request or other suggestion +labels: enhancement + +--- diff --git a/.github/dependabot.yml b/.github/dependabot.yml deleted file mode 100644 index cbd920f..0000000 --- a/.github/dependabot.yml +++ /dev/null @@ -1,10 +0,0 @@ -version: 2 -updates: - - package-ecosystem: "github-actions" - directory: "/" - schedule: - interval: "weekly" - - package-ecosystem: "pip" - directory: "/" - schedule: - interval: "weekly" diff --git a/.github/workflows/deploy.yaml b/.github/workflows/deploy.yml similarity index 62% rename from .github/workflows/deploy.yaml rename to .github/workflows/deploy.yml index 0c1f962..ab8b1c9 100644 --- a/.github/workflows/deploy.yaml +++ b/.github/workflows/deploy.yml @@ -1,4 +1,4 @@ -name: Lint, Build, and Deploy +name: Lint & Deploy on: push: @@ -10,64 +10,96 @@ on: - created env: - PYTHON_VERSION: 3.11 BEEPER_BRIDGE_TYPE: linkedin CI_REGISTRY_IMAGE: "${{ secrets.CI_REGISTRY }}/bridge/linkedin" GHCR_REGISTRY: ghcr.io GHCR_REGISTRY_IMAGE: "ghcr.io/${{ github.repository }}" + BINARY_NAME: linkedin-matrix + jobs: lint: runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - - uses: isort/isort-action@master - with: - sortPaths: "./linkedin_matrix" - - uses: psf/black@stable - with: - src: "./linkedin_matrix" - version: "22.3.0" - - name: pre-commit - run: | - pip install pre-commit - pre-commit run -av trailing-whitespace - pre-commit run -av end-of-file-fixer - pre-commit run -av check-yaml - pre-commit run -av check-added-large-files - - test: - name: Run the tests - runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + go-version: [ 
"1.22", "1.23.2" ] + name: Lint ${{ matrix.go-version == '1.23.2' && '(latest)' || '(old)' }} + steps: - uses: actions/checkout@v4 - - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@v5 + + - name: Set up Go + uses: actions/setup-go@v5 with: - python-version: ${{ env.PYTHON_VERSION }} + go-version: ${{ matrix.go-version }} + cache: true - - name: Install dependencies - run: | - pip install -r requirements.txt - pip install -r dev-requirements.txt + - name: Install libolm + run: sudo apt-get install libolm-dev libolm3 - - name: Run pytest + - name: Install dependencies run: | - pytest -vv - pytest -vv > pytest-coverage.txt + go install golang.org/x/tools/cmd/goimports@latest + go install honnef.co/go/tools/cmd/staticcheck@latest + export PATH="$HOME/go/bin:$PATH" - - name: Comment coverage - uses: coroo/pytest-coverage-commentator@v1.0.2 - if: ${{ github.event_name == 'pull_request' && github.event.action == 'created' }} + - name: Run pre-commit + uses: pre-commit/action@v3.0.1 + build: + runs-on: ubuntu-latest + needs: lint + env: + GOPATH: ${{ github.workspace }}/.cache + GOCACHE: ${{ github.workspace }}/.cache/build + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Go + uses: actions/setup-go@v4 + with: + go-version: "1.23.2" + cache: true + + - name: Create cache directory + run: mkdir -p .cache + + - name: Get Mautrix Version + id: mautrix_version + run: echo "MAUTRIX_VERSION=$(grep 'maunium.net/go/mautrix ' go.mod | awk '{ print $2 }')" >> $GITHUB_ENV + + - name: Set LDFLAGS + run: | + echo "GO_LDFLAGS=-s -w -linkmode external -extldflags '-static' \ + -X main.Tag=${{ github.ref_name }} -X main.Commit=${{ github.sha }} \ + -X 'main.BuildTime=`date -Iseconds`' \ + -X 'maunium.net/go/mautrix.GoModVersion=${{ env.MAUTRIX_VERSION }}'" >> $GITHUB_ENV + + - name: Build binary + run: go build -ldflags "$GO_LDFLAGS" -o $BINARY_NAME ./cmd/$BINARY_NAME + + - name: Generate SHA256 checksum + run: sha256sum 
$BINARY_NAME | tee $BINARY_NAME.sha256sum + + - name: Upload binary artifact + uses: actions/upload-artifact@v3 + with: + name: $BINARY_NAME + path: $BINARY_NAME build-docker: runs-on: ubuntu-latest + needs: build steps: - - uses: actions/checkout@v4 + - name: Checkout code + uses: actions/checkout@v4 + + - name: Download binary artifact + uses: actions/download-artifact@v3 + with: + name: $BINARY_NAME + path: ./ - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 @@ -100,9 +132,7 @@ jobs: deploy-docker: runs-on: ubuntu-latest - needs: - - lint - - build-docker + needs: build-docker if: github.ref == 'refs/heads/master' steps: - name: Login to Beeper Docker registry @@ -138,18 +168,6 @@ jobs: BEEPER_STAGING_ADMIN_NIGHTLY_PASS: "${{ secrets.BEEPER_STAGING_ADMIN_NIGHTLY_PASS }}" BEEPER_PROD_ADMIN_NIGHTLY_PASS: "${{ secrets.BEEPER_PROD_ADMIN_NIGHTLY_PASS }}" - publish-pypi: - runs-on: ubuntu-latest - needs: - - lint - if: ${{ github.event_name == 'release' && github.event.action == 'created' }} - steps: - - uses: actions/checkout@v4 - - name: Publish to pypi - uses: pypa/gh-action-pypi-publish@v1.8.12 - with: - password: ${{ secrets.PYPI_TOKEN }} - tag-docker-release: runs-on: ubuntu-latest needs: diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml new file mode 100644 index 0000000..255427f --- /dev/null +++ b/.github/workflows/stale.yml @@ -0,0 +1,29 @@ +name: 'Lock old issues' + +on: + schedule: + - cron: '0 18 * * *' + workflow_dispatch: + +permissions: + issues: write +# pull-requests: write +# discussions: write + +concurrency: + group: lock-threads + +jobs: + lock-stale: + runs-on: ubuntu-latest + steps: + - uses: dessant/lock-threads@v5 + id: lock + with: + issue-inactive-days: 90 + process-only: issues + - name: Log processed threads + run: | + if [ '${{ steps.lock.outputs.issues }}' ]; then + echo "Issues:" && echo '${{ steps.lock.outputs.issues }}' | jq -r '.[] | 
"https://github.com/\(.owner)/\(.repo)/issues/\(.issue_number)"' + fi diff --git a/.gitignore b/.gitignore index 56d0a6b..36c4535 100644 --- a/.gitignore +++ b/.gitignore @@ -1,170 +1,6 @@ -.direnv -.venv -.vim -docker-requirements.txt -linkedinmatrix.db* - -# Test caches so I don't get banned from LinkedIn -convocache.json -threadcache.json -synapse-files - -# Config files +./linkedin-matrix +logs/ +*.db* *.yaml +!example-config.yaml !.pre-commit-config.yaml -!linkedin_matrix/example-config.yaml -!.github/workflows/* - -# Created by https://www.toptal.com/developers/gitignore/api/python -# Edit at https://www.toptal.com/developers/gitignore?templates=python - -### Python ### -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -parts/ -sdist/ -var/ -wheels/ -pip-wheel-metadata/ -share/python-wheels/ -*.egg-info/ -.installed.cfg -*.egg -MANIFEST - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. -*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.nox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -*.py,cover -.hypothesis/ -.pytest_cache/ -pytestdebug.log - -# Translations -*.mo -*.pot - -# Django stuff: -*.log -local_settings.py -db.sqlite3 -db.sqlite3-journal - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ -doc/_build/ - -# PyBuilder -target/ - -# Jupyter Notebook -.ipynb_checkpoints - -# IPython -profile_default/ -ipython_config.py - -# pyenv -.python-version - -# pipenv -# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
-# However, in case of collaboration, if having platform-specific dependencies or dependencies -# having no cross-platform support, pipenv may install dependencies that don't work, or not -# install all needed dependencies. -#Pipfile.lock - -# poetry -#poetry.lock - -# PEP 582; used by e.g. github.com/David-OConnor/pyflow -__pypackages__/ - -# Celery stuff -celerybeat-schedule -celerybeat.pid - -# SageMath parsed files -*.sage.py - -# Environments -# .env -.env/ -.venv/ -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ -pythonenv* - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ -.dmypy.json -dmypy.json - -# Pyre type checker -.pyre/ - -# pytype static type analyzer -.pytype/ - -# operating system-related files -# file properties cache/storage on macOS -*.DS_Store -# thumbnail cache on Windows -Thumbs.db - -# profiling data -.prof - - -# End of https://www.toptal.com/developers/gitignore/api/python diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index fa5b2db..464cbd1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,40 +1,28 @@ repos: - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 - hooks: - - id: trailing-whitespace - exclude_types: [markdown] - - id: end-of-file-fixer - - id: check-yaml - - id: check-added-large-files + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v5.0.0 + hooks: + - id: trailing-whitespace + exclude_types: [markdown] + - id: end-of-file-fixer + - id: check-yaml + - id: check-added-large-files - # black - - repo: https://github.com/psf/black - rev: 24.2.0 - hooks: - - id: black - files: ^(cicd|linkedin_matrix)/.*\.pyi?$ + - repo: https://github.com/tekwizely/pre-commit-golang + rev: v1.0.0-rc.1 + hooks: + - id: go-imports-repo + args: + - "-local" + - "go.mau.fi/mautrix-twitter" + - "-w" + - id: go-vet-repo-mod + - id: go-staticcheck-repo-mod + - id: go-mod-tidy - # isort 
- - repo: https://github.com/PyCQA/isort - rev: 5.13.2 - hooks: - - id: isort - files: ^(cicd|linkedin_matrix|linkedin_messaging)/.*\.pyi?$ - - # flake8 - - repo: https://github.com/pycqa/flake8 - rev: 7.0.0 - hooks: - - id: flake8 - files: ^.*\.py$ - - # custom style checks - - repo: local - hooks: - - id: custom-style-check - name: custom style check - entry: ./cicd/custom_style_check.py - language: python - additional_dependencies: - - termcolor==2.1.1 + - repo: https://github.com/beeper/pre-commit-go + rev: v0.3.1 + hooks: + - id: zerolog-ban-msgf + - id: zerolog-use-stringer + - id: prevent-literal-http-methods diff --git a/Dockerfile b/Dockerfile index 915c1cc..f78d94e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,44 +1,15 @@ -FROM docker.io/alpine:3.19 +FROM alpine:3.20 -ARG TARGETARCH=amd64 +ENV UID=1337 \ + GID=1337 -RUN apk add --no-cache \ - python3 py3-pip py3-setuptools py3-wheel \ - #py3-pillow \ - py3-aiohttp \ - py3-magic \ - py3-ruamel.yaml \ - py3-commonmark \ - py3-prometheus-client \ - py3-olm \ - py3-cffi \ - py3-pycryptodome \ - py3-unpaddedbase64 \ - py3-future \ - py3-aiohttp-socks \ - py3-pysocks \ - ca-certificates \ - su-exec \ - bash \ - curl \ - git \ - jq \ - yq - -COPY requirements.txt /opt/linkedin-matrix/requirements.txt -COPY optional-requirements.txt /opt/linkedin-matrix/optional-requirements.txt -WORKDIR /opt/linkedin-matrix - -RUN apk add --virtual .build-deps python3-dev libffi-dev build-base \ - && pip3 install --break-system-packages --no-cache-dir -r requirements.txt -r optional-requirements.txt \ - && apk del .build-deps - -COPY . /opt/linkedin-matrix -RUN apk add --no-cache git && pip3 install --break-system-packages --no-cache-dir .[e2be] && apk del git \ - # This doesn't make the image smaller, but it's needed so that the `version` command works properly - && cp linkedin_matrix/example-config.yaml . 
&& rm -rf linkedin_matrix .git build +RUN apk add --no-cache ffmpeg su-exec ca-certificates bash jq curl yq-go +ARG EXECUTABLE=./cmd/linkedin-matrix +COPY $EXECUTABLE /usr/bin/linkedin-matrix +COPY ./docker-run.sh /docker-run.sh +ENV BRIDGEV2=1 VOLUME /data -ENV UID=1337 GID=1337 +WORKDIR /data -CMD ["/opt/linkedin-matrix/docker-run.sh"] +CMD ["/docker-run.sh"] diff --git a/ROADMAP.md b/ROADMAP.md index 3cc158f..5d46382 100644 --- a/ROADMAP.md +++ b/ROADMAP.md @@ -2,24 +2,24 @@ * Matrix → LinkedIn * [ ] Message content - * [x] Text + * [ ] Text * [ ] Media - * [x] Files - * [x] Images - * [x] Videos - * [x] GIFs + * [ ] Files + * [ ] Images + * [ ] Videos + * [ ] GIFs * [ ] Voice Messages * [ ] Stickers * [ ] ~~Formatting~~ (LinkedIn does not support rich formatting) * [ ] ~~Replies~~ (LinkedIn does not support replies) - * [x] Mentions - * [x] Emotes - * [x] Message redactions - * [x] Message reactions + * [ ] Mentions + * [ ] Emotes + * [ ] Message redactions + * [ ] Message reactions * [ ] Presence - * [x] Typing notifications - * [x] Mark conversations as read - * [x] Read receipts + * [ ] Typing notifications + * [ ] Mark conversations as read + * [ ] Read receipts * [ ] Power level * [ ] Membership actions * [ ] Invite @@ -30,39 +30,39 @@ * [ ] Avatar * [ ] Per-room user nick * LinkedIn → Matrix - * [x] Message content - * [x] Text - * [x] Media - * [x] Files - * [x] Images - * [x] GIFs - * [x] Voice Messages - * [x] Mentions - * [x] Message delete - * [x] Message reactions - * [x] Message history - * [x] Real-time messages + * [ ] Message content + * [ ] Text + * [ ] Media + * [ ] Files + * [ ] Images + * [ ] GIFs + * [ ] Voice Messages + * [ ] Mentions + * [ ] Message delete + * [ ] Message reactions + * [ ] Message history + * [ ] Real-time messages * [ ] Presence - * [x] Typing notifications - * [x] Read receipts - * [x] Admin status + * [ ] Typing notifications + * [ ] Read receipts + * [ ] Admin status * [ ] Membership actions * [ ] Add member * [ ] 
Remove member * [ ] Leave * [ ] Chat metadata changes - * [x] Title + * [ ] Title * [ ] Avatar * [ ] Initial chat metadata - * [x] User metadata - * [x] Name - * [x] Avatar + * [ ] User metadata + * [ ] Name + * [ ] Avatar * Misc * [ ] Multi-user support * [ ] Shared group chat portals - * [x] Automatic portal creation - * [x] At startup - * [x] When added to chat - * [x] When receiving message + * [ ] Automatic portal creation + * [ ] At startup + * [ ] When added to chat + * [ ] When receiving message * [ ] Private chat creation by inviting Matrix puppet of LinkedIn user to new room - * [x] Option to use own Matrix account for messages sent from other LinkedIn clients + * [ ] Option to use own Matrix account for messages sent from other LinkedIn clients diff --git a/build.sh b/build.sh new file mode 100644 index 0000000..6f35bdf --- /dev/null +++ b/build.sh @@ -0,0 +1,4 @@ +#!/bin/sh +MAUTRIX_VERSION=$(cat go.mod | grep 'maunium.net/go/mautrix ' | awk '{ print $2 }' | head -n1) +GO_LDFLAGS="-s -w -X main.Tag=$(git describe --exact-match --tags 2>/dev/null) -X main.Commit=$(git rev-parse HEAD) -X 'main.BuildTime=`date -Iseconds`' -X 'maunium.net/go/mautrix.GoModVersion=$MAUTRIX_VERSION'" +go build -ldflags="$GO_LDFLAGS" ./cmd/linkedin-matrix "$@" diff --git a/cicd/custom_style_check.py b/cicd/custom_style_check.py deleted file mode 100755 index 6507cb0..0000000 --- a/cicd/custom_style_check.py +++ /dev/null @@ -1,71 +0,0 @@ -#! /usr/bin/env python - -""" -Checks for TODO comments and makes sure they have an associated issue. Formats that are -accepted are: - -TODO (#1) -TODO (#1) -TODO (project#1) -TODO (namespace/project#1) -TODO (namespace/namespace/project#1) - -Additionally, the TODO can be postfixed with ``:``. 
-""" - -from typing import Pattern -from pathlib import Path -import re -import sys - -from termcolor import cprint - -todo_re = re.compile(r"\s*#\s*TODO:?\s*") -accounted_for_todo = re.compile(r"\s*#\s*TODO:?\s*\(([\w-]+(/[\w-]+)*)?#\d+\)") - - -def noqa_re(error_id: str = "") -> Pattern: - return re.compile(rf"#\s*noqa(:\s*{error_id})?\s*\n$") - - -def eprint(*strings: str): - cprint(" ".join(strings), "red", end="", attrs=["bold"]) - - -def check_file(path: Path) -> bool: - print(f"Checking {path.absolute()}...") # noqa: T001 - file = path.open() - valid = True - - for i, line in enumerate(file, start=1): - if todo_re.match(line) and not accounted_for_todo.match(line): - eprint(f"{i}: {line}") - valid = False - - file.close() - return valid - - -valid = True -for path in Path("linkedin_matrix").glob("**/*.py"): - valid &= check_file(path) - -for path in Path("tests").glob("**/*.py"): - valid &= check_file(path) - -""" -Checks that the version in the CHANGELOG is the same as the version in ``__init__.py``. 
-""" -with open(Path("linkedin_matrix/__init__.py")) as f: - for line in f: - if line.startswith("__version__"): - version = eval(line.split()[-1]) - break - else: # nobreak - raise AssertionError("No version in linkedin_matrix/__init__.py") - -with open(Path("CHANGELOG.md")) as f: - assert f.readline().strip() == f"# v{version}", "Version mismatch: CHANGELOG" - - -sys.exit(0 if valid else 1) diff --git a/cmd/linkedin-matrix/legacymigrate.go b/cmd/linkedin-matrix/legacymigrate.go new file mode 100644 index 0000000..0f192ad --- /dev/null +++ b/cmd/linkedin-matrix/legacymigrate.go @@ -0,0 +1,9 @@ +package main + +import ( + up "go.mau.fi/util/configupgrade" +) + +func migrateLegacyConfig(helper up.Helper) { + helper.Set(up.Str, "mautrix.bridge.e2ee", "encryption", "pickle_key") +} diff --git a/cmd/linkedin-matrix/legacyprovision.go b/cmd/linkedin-matrix/legacyprovision.go new file mode 100644 index 0000000..e5cd505 --- /dev/null +++ b/cmd/linkedin-matrix/legacyprovision.go @@ -0,0 +1,123 @@ +package main + +import ( + "context" + "encoding/json" + "errors" + "net/http" + + "github.com/rs/zerolog" + "maunium.net/go/mautrix" + "maunium.net/go/mautrix/bridge/status" + "maunium.net/go/mautrix/bridgev2" + "maunium.net/go/mautrix/bridgev2/bridgeconfig" + + "github.com/beeper/linkedin/pkg/connector" +) + +func jsonResponse(w http.ResponseWriter, status int, response any) { + w.Header().Add("Content-Type", "application/json") + w.WriteHeader(status) + _ = json.NewEncoder(w).Encode(response) +} + +type Error struct { + Success bool `json:"success"` + Error string `json:"error"` + ErrCode string `json:"errcode"` +} + +type Response struct { + Success bool `json:"success"` + Status string `json:"status"` +} + +var levelsToNames = map[bridgeconfig.Permissions]string{ + bridgeconfig.PermissionLevelBlock: "block", + bridgeconfig.PermissionLevelRelay: "relay", + bridgeconfig.PermissionLevelCommands: "commands", + bridgeconfig.PermissionLevelUser: "user", + 
bridgeconfig.PermissionLevelAdmin: "admin", +} + +func legacyProvStatus(w http.ResponseWriter, r *http.Request) { + user := m.Matrix.Provisioning.GetUser(r) + response := map[string]any{ + "permissions": levelsToNames[user.Permissions], + "mxid": user.MXID.String(), + } + + ul := user.GetDefaultLogin() + if ul.ID != "" { // if logged in + linClient := connector.NewLinkedInClient(r.Context(), c, ul) + + currentUser, err := linClient.GetCurrentUser() + if err == nil { + response["linkedin"] = currentUser + } + } + + jsonResponse(w, http.StatusOK, response) +} + +func legacyProvLogin(w http.ResponseWriter, r *http.Request) { + user := m.Matrix.Provisioning.GetUser(r) + ctx := r.Context() + var body map[string]map[string]string + err := json.NewDecoder(r.Body).Decode(&body) + if err != nil { + jsonResponse(w, http.StatusBadRequest, Error{ErrCode: mautrix.MBadJSON.ErrCode, Error: err.Error()}) + return + } + cookieString := body["all_headers"]["Cookie"] + + lp, err := c.CreateLogin(ctx, user, "cookies") + if err != nil { + zerolog.Ctx(ctx).Err(err).Msg("Failed to create login") + jsonResponse(w, http.StatusInternalServerError, Error{ErrCode: "M_UNKNOWN", Error: "Internal error creating login"}) + } else if firstStep, err := lp.Start(ctx); err != nil { + zerolog.Ctx(ctx).Err(err).Msg("Failed to start login") + jsonResponse(w, http.StatusInternalServerError, Error{ErrCode: "M_UNKNOWN", Error: "Internal error starting login"}) + } else if firstStep.StepID != connector.LoginStepIDCookies { + jsonResponse(w, http.StatusInternalServerError, Error{ErrCode: "M_UNKNOWN", Error: "Unexpected login step"}) + } else if !connector.ValidCookieRegex.MatchString(cookieString) { + jsonResponse(w, http.StatusOK, nil) + } else if finalStep, err := lp.(bridgev2.LoginProcessCookies).SubmitCookies(ctx, map[string]string{ + "cookie": cookieString, + }); err != nil { + zerolog.Ctx(ctx).Err(err).Msg("Failed to log in") + var respErr bridgev2.RespError + if errors.As(err, &respErr) { + 
jsonResponse(w, respErr.StatusCode, &respErr) + } else { + jsonResponse(w, http.StatusInternalServerError, Error{ErrCode: "M_UNKNOWN", Error: "Internal error logging in"}) + } + } else if finalStep.StepID != connector.LoginStepIDComplete { + jsonResponse(w, http.StatusInternalServerError, Error{ErrCode: "M_UNKNOWN", Error: "Unexpected login step"}) + } else { + jsonResponse(w, http.StatusOK, map[string]any{}) + go handleLoginComplete(context.WithoutCancel(ctx), user, finalStep.CompleteParams.UserLogin) + } +} + +func handleLoginComplete(ctx context.Context, user *bridgev2.User, newLogin *bridgev2.UserLogin) { + allLogins := user.GetUserLogins() + for _, login := range allLogins { + if login.ID != newLogin.ID { + login.Delete(ctx, status.BridgeState{StateEvent: status.StateLoggedOut, Reason: "LOGIN_OVERRIDDEN"}, bridgev2.DeleteOpts{}) + } + } +} + +func legacyProvLogout(w http.ResponseWriter, r *http.Request) { + user := m.Matrix.Provisioning.GetUser(r) + logins := user.GetUserLogins() + for _, login := range logins { + // Intentionally don't delete the user login, only disconnect the client + login.Client.(*connector.LinkedInClient).LogoutRemote(r.Context()) + } + jsonResponse(w, http.StatusOK, Response{ + Success: true, + Status: "logged_out", + }) +} diff --git a/cmd/linkedin-matrix/main.go b/cmd/linkedin-matrix/main.go new file mode 100644 index 0000000..91f55df --- /dev/null +++ b/cmd/linkedin-matrix/main.go @@ -0,0 +1,41 @@ +package main + +import ( + "net/http" + + "maunium.net/go/mautrix/bridgev2/bridgeconfig" + "maunium.net/go/mautrix/bridgev2/matrix/mxmain" + + "github.com/beeper/linkedin/pkg/connector" +) + +// Information to find out exactly which commit the bridge was built from. +// These are filled at build time with the -X linker flag. 
+var ( + Tag = "unknown" + Commit = "unknown" + BuildTime = "unknown" +) + +var c = &connector.LinkedInConnector{} +var m = mxmain.BridgeMain{ + Name: "mautrix-twitter", + URL: "https://github.com/mautrix/twitter", + Description: "A Matrix-Twitter puppeting bridge.", + Version: "0.2.0", + Connector: c, +} + +func main() { + bridgeconfig.HackyMigrateLegacyNetworkConfig = migrateLegacyConfig + m.PostStart = func() { + if m.Matrix.Provisioning != nil { + m.Matrix.Provisioning.Router.HandleFunc("/v1/api/whoami", legacyProvStatus).Methods(http.MethodGet) + m.Matrix.Provisioning.Router.HandleFunc("/v1/api/login", legacyProvLogin).Methods(http.MethodPost) + m.Matrix.Provisioning.Router.HandleFunc("/v1/api/logout", legacyProvLogout).Methods(http.MethodPost) + } + } + + m.InitVersion(Tag, Commit, BuildTime) + m.Run() +} diff --git a/dev-requirements.txt b/dev-requirements.txt deleted file mode 100644 index 4601559..0000000 --- a/dev-requirements.txt +++ /dev/null @@ -1,9 +0,0 @@ -black>=23,<24 -flake8-annotations>=2.7.0,<4 -flake8-isort>=6.0.0,<7 -flake8-print>=5.0.0,<6 -flake8>=6.0.0,<7 -isort>=5.10.1,<6 -pre-commit>=2.10.1,<4 -pytest>=7.4.3,<7.5 -termcolor>=2.1.1,<3 diff --git a/docker-run.sh b/docker-run.sh index 3c4f5df..7476123 100755 --- a/docker-run.sh +++ b/docker-run.sh @@ -1,31 +1,32 @@ #!/bin/sh -[ -z $CONFIG_PATH ] && CONFIG_PATH="/data/config.yaml" -[ -z $REGISTRATION_PATH ] && REGISTRATION_PATH="/data/registration.yaml" +if [[ -z "$GID" ]]; then + GID="$UID" +fi + +BINARY_NAME=/usr/bin/linkedin-matrix -# Define functions. function fixperms { - chown -R $UID:$GID $CONFIG_PATH $REGISTRATION_PATH + chown -R $UID:$GID /data } -cd /opt/linkedin-matrix - -if [ ! -f $CONFIG_PATH ]; then - cp example-config.yaml $CONFIG_PATH - sed -i "s#hostname: localhost#hostname: 0.0.0.0#" $CONFIG_PATH +if [[ ! -f /data/config.yaml ]]; then + $BINARY_NAME -c /data/config.yaml -e echo "Didn't find a config file." 
- echo "Copied default config file to $CONFIG_PATH" + echo "Copied default config file to /data/config.yaml" echo "Modify that config file to your liking." echo "Start the container again after that to generate the registration file." - fixperms exit fi -if [ ! -f $REGISTRATION_PATH ]; then - python3 -m linkedin_matrix -g -c $CONFIG_PATH -r $REGISTRATION_PATH - fixperms +if [[ ! -f /data/registration.yaml ]]; then + $BINARY_NAME -g -c /data/config.yaml -r /data/registration.yaml + echo "Didn't find a registration file." + echo "Generated one for you." + echo "See https://docs.mau.fi/bridges/general/registering-appservices.html on how to use it." exit fi +cd /data fixperms -exec su-exec $UID:$GID python3 -m linkedin_matrix -c $CONFIG_PATH +exec su-exec $UID:$GID $BINARY_NAME diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..0989d3a --- /dev/null +++ b/go.mod @@ -0,0 +1,43 @@ +module github.com/beeper/linkedin + +go 1.22.0 + +toolchain go1.22.3 + +require github.com/rs/zerolog v1.33.0 + +require ( + github.com/google/go-querystring v1.1.0 + github.com/google/uuid v1.6.0 + github.com/mattn/go-colorable v0.1.13 + go.mau.fi/util v0.8.1 + golang.org/x/net v0.30.0 + gopkg.in/yaml.v3 v3.0.1 + maunium.net/go/mautrix v0.21.1 +) + +require ( + filippo.io/edwards25519 v1.1.0 // indirect + github.com/coreos/go-systemd/v22 v22.5.0 // indirect + github.com/gorilla/mux v1.8.0 // indirect + github.com/gorilla/websocket v1.5.0 // indirect + github.com/lib/pq v1.10.9 // indirect + github.com/mattn/go-isatty v0.0.19 // indirect + github.com/mattn/go-sqlite3 v1.14.24 // indirect + github.com/petermattis/goid v0.0.0-20240813172612-4fcff4a6cae7 // indirect + github.com/rs/xid v1.6.0 // indirect + github.com/skip2/go-qrcode v0.0.0-20200617195104-da1b6568686e // indirect + github.com/tidwall/gjson v1.18.0 // indirect + github.com/tidwall/match v1.1.1 // indirect + github.com/tidwall/pretty v1.2.0 // indirect + github.com/tidwall/sjson v1.2.5 // indirect + 
github.com/yuin/goldmark v1.7.7 // indirect + go.mau.fi/zeroconfig v0.1.3 // indirect + golang.org/x/crypto v0.28.0 // indirect + golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect + golang.org/x/sync v0.8.0 // indirect + golang.org/x/sys v0.26.0 // indirect + golang.org/x/text v0.19.0 // indirect + gopkg.in/natefinch/lumberjack.v2 v2.2.1 // indirect + maunium.net/go/mauflag v1.0.0 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..a662c85 --- /dev/null +++ b/go.sum @@ -0,0 +1,84 @@ +filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= +filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= +github.com/DATA-DOG/go-sqlmock v1.5.2 h1:OcvFkGmslmlZibjAjaHm3L//6LiuBgolP7OputlJIzU= +github.com/DATA-DOG/go-sqlmock v1.5.2/go.mod h1:88MAG/4G7SMwSE3CeA0ZKzrT5CiOU3OJ+JlNzwDqpNU= +github.com/coreos/go-systemd/v22 v22.5.0 h1:RrqgGjYQKalulkV8NGVIfkXQf6YYmOyiJKk8iXXhfZs= +github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= +github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/gorilla/mux v1.8.0 
h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= +github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= +github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= +github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= +github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= +github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-sqlite3 v1.14.24 h1:tpSp2G2KyMnnQu99ngJ47EIkWVmliIizyZBfPrBWDRM= +github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= +github.com/petermattis/goid v0.0.0-20240813172612-4fcff4a6cae7 h1:Dx7Ovyv/SFnMFw3fD4oEoeorXc6saIiQ23LrGLth0Gw= +github.com/petermattis/goid v0.0.0-20240813172612-4fcff4a6cae7/go.mod h1:pxMtw7cyUw6B2bRH0ZBANSPg+AoSud1I1iyJHI69jH4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= +github.com/rs/xid v1.6.0 h1:fV591PaemRlL6JfRxGDEPl69wICngIQ3shQtzfy2gxU= +github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0= +github.com/rs/zerolog v1.33.0 h1:1cU2KZkvPxNyfgEmhHAz/1A9Bz+llsdYzklWFzgp0r8= +github.com/rs/zerolog v1.33.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= 
+github.com/skip2/go-qrcode v0.0.0-20200617195104-da1b6568686e h1:MRM5ITcdelLK2j1vwZ3Je0FKVCfqOLp5zO6trqMLYs0= +github.com/skip2/go-qrcode v0.0.0-20200617195104-da1b6568686e/go.mod h1:XV66xRDqSt+GTGFMVlhk3ULuV0y9ZmzeVGR4mloJI3M= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= +github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= +github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs= +github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= +github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= +github.com/yuin/goldmark v1.7.7 h1:5m9rrB1sW3JUMToKFQfb+FGt1U7r57IHu5GrYrG2nqU= +github.com/yuin/goldmark v1.7.7/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E= +go.mau.fi/util v0.8.1 h1:Ga43cz6esQBYqcjZ/onRoVnYWoUwjWbsxVeJg2jOTSo= +go.mau.fi/util v0.8.1/go.mod h1:T1u/rD2rzidVrBLyaUdPpZiJdP/rsyi+aTzn0D+Q6wc= +go.mau.fi/zeroconfig v0.1.3 h1:As9wYDKmktjmNZW5i1vn8zvJlmGKHeVxHVIBMXsm4kM= +go.mau.fi/zeroconfig v0.1.3/go.mod h1:NcSJkf180JT+1IId76PcMuLTNa1CzsFFZ0nBygIQM70= +golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw= +golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U= +golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= +golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod 
h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= +golang.org/x/net v0.30.0 h1:AcW1SDZMkb8IpzCdQUaIq2sP4sZ4zw+55h6ynffypl4= +golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU= +golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= +golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= +golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM= +golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc= +gopkg.in/natefinch/lumberjack.v2 v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +maunium.net/go/mauflag v1.0.0 h1:YiaRc0tEI3toYtJMRIfjP+jklH45uDHtT80nUamyD4M= +maunium.net/go/mauflag v1.0.0/go.mod h1:nLivPOpTpHnpzEh8jEdSL9UqO9+/KBJFmNRlwKfkPeA= +maunium.net/go/mautrix v0.21.1 h1:Z+e448jtlY977iC1kokNJTH5kg2WmDpcQCqn+v9oZOA= +maunium.net/go/mautrix v0.21.1/go.mod h1:7F/S6XAdyc/6DW+Q7xyFXRSPb6IjfqMb1OMepQ8C8OE= diff --git a/linkedin_matrix/__init__.py 
b/linkedin_matrix/__init__.py deleted file mode 100644 index f035a6d..0000000 --- a/linkedin_matrix/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -__version__ = "0.5.4" -__author__ = "Sumner Evans " diff --git a/linkedin_matrix/__main__.py b/linkedin_matrix/__main__.py deleted file mode 100644 index 5cc361c..0000000 --- a/linkedin_matrix/__main__.py +++ /dev/null @@ -1,137 +0,0 @@ -from __future__ import annotations - -from typing import Any - -from mautrix.bridge import Bridge -from mautrix.bridge.state_store.asyncpg import PgBridgeStateStore -from mautrix.types import RoomID, UserID -from mautrix.util.async_db import Database - -from . import commands as _ # noqa: F401 -from .analytics import init as init_analytics -from .config import Config -from .db import init as init_db, upgrade_table -from .matrix import MatrixHandler -from .portal import Portal # noqa: I100 (needs to be after because it relies on Puppet) -from .puppet import Puppet -from .user import User -from .version import linkified_version, version -from .web import ProvisioningAPI - - -class LinkedInBridge(Bridge): - name = "linkedin-matrix" - module = "linkedin_matrix" - beeper_service_name = "linkedin" - beeper_network_name = "linkedin" - command = "linkedin-matrix" - description = "A Matrix-LinkedIn Messages puppeting bridge." 
- repo_url = "https://github.com/beeper/linkedin" - version = version - markdown_version = linkified_version - config_class = Config - matrix_class = MatrixHandler - - config: Config - db: Database - matrix: MatrixHandler - provisioning_api: ProvisioningAPI - state_store: PgBridgeStateStore - - def make_state_store(self): - self.state_store = PgBridgeStateStore( - self.db, - self.get_puppet, - self.get_double_puppet, - ) - - def prepare_db(self): - self.db = Database.create( - self.config["appservice.database"], - upgrade_table=upgrade_table, - db_args=self.config["appservice.database_opts"], - ) - init_db(self.db) - - def prepare_stop(self): - # self.periodic_reconnect_task.cancel() - self.log.debug("Stopping puppet syncers") - for puppet in Puppet.by_custom_mxid.values(): - puppet.stop() - self.log.debug("Stopping LinkedIn listeners") - User.shutdown = True - for user in User.by_li_member_urn.values(): - user.stop_listen() - - def prepare_bridge(self): - super().prepare_bridge() - if self.config["appservice.provisioning.enabled"]: - secret = self.config["appservice.provisioning.shared_secret"] - prefix = self.config["appservice.provisioning.prefix"] - self.provisioning_api = ProvisioningAPI(secret) - self.az.app.add_subapp(prefix, self.provisioning_api.app) - - if self.config["analytics.token"]: - host = self.config["analytics.host"] - token = self.config["analytics.token"] - user_id = self.config["analytics.user_id"] - if token: - init_analytics(host, token, user_id) - - async def stop(self): - await Puppet.close() - self.log.debug("Saving user sessions") - for user in User.by_mxid.values(): - await user.save() - await super().stop() - await self.db.stop() - - async def start(self): - self.add_startup_actions(User.init_cls(self)) - self.add_startup_actions(Puppet.init_cls(self)) - Portal.init_cls(self) - if self.config["bridge.resend_bridge_info"]: - self.add_startup_actions(self.resend_bridge_info()) - await super().start() - - async def 
resend_bridge_info(self): - self.config["bridge.resend_bridge_info"] = False - self.config.save() - self.log.info("Re-sending bridge info state event to all portals") - async for portal in Portal.all(): - await portal.update_bridge_info() - self.log.info("Finished re-sending bridge info state events") - - async def get_portal(self, room_id: RoomID) -> Portal: - return await Portal.get_by_mxid(room_id) - - async def get_puppet(self, user_id: UserID, create: bool = False) -> Puppet | None: - return await Puppet.get_by_mxid(user_id, create=create) - - async def get_double_puppet(self, user_id: UserID) -> Puppet: - return await Puppet.get_by_custom_mxid(user_id) - - async def get_user(self, user_id: UserID, create: bool = True) -> User: - return await User.get_by_mxid(user_id, create=create) - - def is_bridge_ghost(self, user_id: UserID) -> bool: - return bool(Puppet.get_id_from_mxid(user_id)) - - async def count_logged_in_users(self) -> int: - return len([user for user in User.by_li_member_urn.values() if user.li_member_urn]) - - async def manhole_global_namespace(self, user_id: UserID) -> dict[str, Any]: - return { - **await super().manhole_global_namespace(user_id), - "User": User, - "Portal": Portal, - "Puppet": Puppet, - } - - -def main(): - LinkedInBridge().run() - - -if __name__ == "__main__": - main() diff --git a/linkedin_matrix/analytics.py b/linkedin_matrix/analytics.py deleted file mode 100644 index 980aac1..0000000 --- a/linkedin_matrix/analytics.py +++ /dev/null @@ -1,48 +0,0 @@ -from __future__ import annotations - -import logging - -from yarl import URL -import aiohttp - -from mautrix.util import background_task - -from . 
import user as u - -log = logging.getLogger("mau.web.public.analytics") -http: aiohttp.ClientSession | None = None -analytics_url: URL | None = None -analytics_token: str | None = None -analytics_user_id: str | None = None - - -async def _track(user: u.User, event: str, properties: dict) -> None: - assert analytics_token - assert analytics_url - assert http - await http.post( - analytics_url, - json={ - "userId": analytics_user_id or user.mxid, - "event": event, - "properties": {"bridge": "linkedin", **properties}, - }, - auth=aiohttp.BasicAuth(login=analytics_token, encoding="utf-8"), - ) - log.debug(f"Tracked {event}") - - -def track(user: u.User, event: str, properties: dict | None = None): - if analytics_token: - background_task.create(_track(user, event, properties or {})) - - -def init(base_url: str | None, token: str | None, user_id: str | None = None): - if not base_url or not token: - return - log.info("Initialising segment-compatible analytics") - global analytics_url, analytics_token, analytics_user_id, http - analytics_url = URL.build(scheme="https", host=base_url, path="/v1/track") - analytics_token = token - analytics_user_id = user_id - http = aiohttp.ClientSession() diff --git a/linkedin_matrix/commands/__init__.py b/linkedin_matrix/commands/__init__.py deleted file mode 100644 index be5366e..0000000 --- a/linkedin_matrix/commands/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .auth import SECTION_AUTH, login - -__all__ = ("SECTION_AUTH", "login") diff --git a/linkedin_matrix/commands/auth.py b/linkedin_matrix/commands/auth.py deleted file mode 100644 index ea7b992..0000000 --- a/linkedin_matrix/commands/auth.py +++ /dev/null @@ -1,169 +0,0 @@ -import logging -import re - -from mautrix.bridge.commands import HelpSection, command_handler - -from .typehint import CommandEvent - -SECTION_AUTH = HelpSection("Authentication", 10, "") - -missing_email = "Please use `$cmdprefix+sp login ` to log in here." 
-send_password = "Please send your password here to log in." -send_2fa_code = "Please send the PIN in your inbox here to complete login." - -# LinkedIn Login URLs -SEED_URL = "https://www.linkedin.com/uas/login" -LOGIN_URL = "https://www.linkedin.com/checkpoint/lg/login-submit" -VERIFY_URL = "https://www.linkedin.com/checkpoint/challenge/verify" - - -@command_handler( - needs_auth=False, - management_only=False, - help_section=SECTION_AUTH, - help_text="See authentication status", -) -async def whoami(evt: CommandEvent): - assert evt.sender - user_profile = evt.sender.user_profile_cache - if user_profile is not None: - logging.debug("Cache hit on user_profile_cache") - elif not evt.sender.client or not await evt.sender.client.logged_in(): - await evt.reply("You are not logged in") - return - assert evt.sender.client - - user_profile = user_profile or await evt.sender.client.get_user_profile() - evt.sender.user_profile_cache = user_profile - if mini_profile := user_profile.mini_profile: - first = mini_profile.first_name - last = mini_profile.last_name - name = f"{first} {last}" - elif plain_id := user_profile.plain_id: - name = plain_id - else: - await evt.reply("You are not logged in") - return - - await evt.reply(f"You are logged in as {name}") - - -# region Login - - -@command_handler( - needs_auth=False, - management_only=False, - help_section=SECTION_AUTH, - help_text=""" - Log in to LinkedIn using cookies from an existing LinkedIn browser session. To extract the - cookies go to your browser developer tools, open the Network tab, then copy the `Cookie` - header from one of the requests to `https://www.linkedin.com/` and paste the result into - the command. It is recommended that you use a private window to extract the cookies. 
- """, - help_args="<_cookie header_>", -) -async def login(evt: CommandEvent): - if evt.sender.client and await evt.sender.client.logged_in(): - await evt.reply("You're already logged in.") - return - - if len(evt.args) == 0: - await evt.reply("**Usage:** `$cmdprefix+sp login `") - return - - await evt.redact() - - cookies: dict[str, str] = {} - for cookie in evt.args: - key, val = cookie.strip(" ;").split("=", 1) - cookies[key] = val - - if not cookies.get("li_at") or not cookies.get("JSESSIONID"): - await evt.reply("Missing li_at or JSESSIONID cookie") - return - - try: - await evt.sender.on_logged_in(cookies, None) - await evt.reply("Successfully logged in") - except Exception as e: - logging.exception("Failed to log in") - await evt.reply(f"Failed to log in: {e}") - return - - -@command_handler( - needs_auth=False, - management_only=False, - help_section=SECTION_AUTH, - help_text=""" - Log in to LinkedIn using a "Copy as cURL" export from an existing LinkedIn browser session. - """, - help_args="<_curl command_>", -) -async def login_curl(evt: CommandEvent): - # if evt.sender.client and await evt.sender.client.logged_in(): - # await evt.reply("You're already logged in.") - # return - - if len(evt.args) == 0: - await evt.reply("**Usage:** `$cmdprefix+sp login-curl `") - return - - await evt.redact() - - curl_command = " ".join(evt.args) - - cookies: dict[str, str] = {} - headers: dict[str, str] = {} - - curl_command_regex = r"-H '(?P[^:]+): (?P[^\']+)'" - header_matches = re.findall(curl_command_regex, curl_command) - for m in header_matches: - (name, value) = m - - if name.lower() == "cookie": - cookie_items = value.split("; ") - for c in cookie_items: - n, v = c.split("=", 1) - cookies[n] = v - elif name.lower() == "accept": - # Every request will have a different value for this - pass - else: - headers[name] = value - - if not cookies.get("li_at") or not cookies.get("JSESSIONID"): - await evt.reply("Missing li_at or JSESSIONID cookie") - return - - try: - 
await evt.sender.on_logged_in(cookies, headers) - await evt.reply("Successfully logged in") - except Exception as e: - logging.exception("Failed to log in") - await evt.reply(f"Failed to log in: {e}") - return - - -# endregion - -# region Log out - - -@command_handler( - needs_auth=False, - management_only=False, - help_section=SECTION_AUTH, - help_text="Log out of LinkedIn", -) -async def logout(evt: CommandEvent): - if not evt.sender.client or not await evt.sender.client.logged_in(): - await evt.reply("You are not logged in.") - return - - await evt.sender.logout() - await evt.reply("Successfully logged out") - - -# endregion diff --git a/linkedin_matrix/commands/typehint.py b/linkedin_matrix/commands/typehint.py deleted file mode 100644 index 8b9a924..0000000 --- a/linkedin_matrix/commands/typehint.py +++ /dev/null @@ -1,12 +0,0 @@ -from typing import TYPE_CHECKING - -from mautrix.bridge.commands import CommandEvent as BaseCommandEvent - -if TYPE_CHECKING: - from ..__main__ import LinkedInBridge - from ..user import User - - -class CommandEvent(BaseCommandEvent): - bridge: "LinkedInBridge" - sender: "User" diff --git a/linkedin_matrix/config.py b/linkedin_matrix/config.py deleted file mode 100644 index 6739bf3..0000000 --- a/linkedin_matrix/config.py +++ /dev/null @@ -1,105 +0,0 @@ -from typing import Any -import os - -from mautrix.bridge.config import BaseBridgeConfig -from mautrix.types import UserID -from mautrix.util.config import ConfigUpdateHelper, ForbiddenDefault, ForbiddenKey - - -class Config(BaseBridgeConfig): - def __getitem__(self, key: str) -> Any: - try: - return os.environ[f"MAUTRIX_LINKEDIN_{key.replace('.', '_').upper()}"] - except KeyError: - return super().__getitem__(key) - - @property - def forbidden_defaults(self) -> list[ForbiddenDefault]: - return [ - *super().forbidden_defaults, - ForbiddenDefault("appservice.database", "postgres://username:password@hostname/db"), - ForbiddenDefault("bridge.permissions", ForbiddenKey("example.com")), - 
] - - def do_update(self, helper: ConfigUpdateHelper): - super().do_update(helper) - copy, copy_dict, base = helper.copy, helper.copy_dict, helper.base - - # appservice - copy("appservice.bot_avatar") - copy("appservice.provisioning.enabled") - copy("appservice.provisioning.prefix") - copy("appservice.provisioning.shared_secret") - if base["appservice.provisioning.shared_secret"] == "generate": - base["appservice.provisioning.shared_secret"] = self._new_token() - - # bridge - copy("bridge.backfill.disable_notifications") - copy("bridge.backfill.initial_limit") - copy("bridge.backfill.invite_own_puppet") - copy("bridge.backfill.missed_limit") - copy("bridge.backfill.unread_hours_threshold") - copy("bridge.command_prefix") - copy("bridge.delivery_receipts") - copy("bridge.displayname_preference") - copy("bridge.displayname_template") - copy("bridge.double_puppet_allow_discovery") - copy("bridge.double_puppet_server_map") - copy("bridge.space_support.enable") - copy("bridge.space_support.name") - copy("bridge.federate_rooms") - copy("bridge.initial_chat_sync") - copy("bridge.invite_own_puppet_to_pm") - copy("bridge.mute_bridging") - copy("bridge.resend_bridge_info") - copy("bridge.set_topic_on_dms") - copy("bridge.sync_direct_chat_list") - copy("bridge.sync_with_custom_puppets") - copy("bridge.tag_only_on_create") - copy("bridge.temporary_disconnect_notices") - copy("bridge.username_template") - - if "bridge.login_shared_secret" in self: - base["bridge.login_shared_secret_map"] = { - base["homeserver.domain"]: self["bridge.login_shared_secret"] - } - else: - copy("bridge.login_shared_secret_map") - - copy_dict("bridge.permissions") - - copy("bridge.private_chat_portal_meta") - if base["bridge.private_chat_portal_meta"] not in ("default", "always", "never"): - base["bridge.private_chat_portal_meta"] = "default" - - # analytics - copy("analytics.host") - if "appservice.provisioning.segment_key" in self: - base["analytics.token"] = 
self["appservice.provisioning.segment_key"] - else: - copy("analytics.token") - if "appservice.provisioning.segment_user_id" in self: - base["analytics.user_id"] = self["appservice.provisioning.segment_user_id"] - else: - copy("analytics.user_id") - - # Metrics - copy("metrics.enabled") - copy("metrics.listen_port") - - def _get_permissions(self, key: str) -> tuple[bool, bool, str]: - level = self["bridge.permissions"].get(key, "") - admin = level == "admin" - user = level == "user" or admin - return user, admin, level - - def get_permissions(self, mxid: UserID) -> tuple[bool, bool, str]: - permissions = self["bridge.permissions"] or {} - if mxid in permissions: - return self._get_permissions(mxid) - - homeserver = mxid[mxid.index(":") + 1 :] - if homeserver in permissions: - return self._get_permissions(homeserver) - - return self._get_permissions("*") diff --git a/linkedin_matrix/db/__init__.py b/linkedin_matrix/db/__init__.py deleted file mode 100644 index 375c695..0000000 --- a/linkedin_matrix/db/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -from mautrix.util.async_db import Database - -from .cookie import Cookie -from .http_header import HttpHeader -from .message import Message -from .model_base import Model -from .portal import Portal -from .puppet import Puppet -from .reaction import Reaction -from .upgrade import upgrade_table -from .user import User -from .user_portal import UserPortal - - -def init(db: Database): - for table in (HttpHeader, Cookie, Message, Portal, Puppet, Reaction, User, UserPortal): - table.db = db # type: ignore - - -__all__ = ( - "init", - "upgrade_table", - # Models - "HttpHeader", - "Cookie", - "Message", - "Model", - "Portal", - "Puppet", - "Reaction", - "User", - "UserPortal", -) diff --git a/linkedin_matrix/db/cookie.py b/linkedin_matrix/db/cookie.py deleted file mode 100644 index 61fbc78..0000000 --- a/linkedin_matrix/db/cookie.py +++ /dev/null @@ -1,54 +0,0 @@ -from __future__ import annotations - -from asyncpg import Record 
-from attr import dataclass - -from mautrix.types import UserID - -from .model_base import Model - - -@dataclass -class Cookie(Model): - mxid: UserID - name: str - value: str - - _table_name = "cookie" - _field_list = [ - "mxid", - "name", - "value", - ] - - @classmethod - def _from_row(cls, row: Record | None) -> Cookie | None: - if row is None: - return None - return cls(**row) - - @classmethod - async def get_for_mxid(cls, mxid: id.UserID) -> list[Cookie]: - query = Cookie.select_constructor("mxid=$1") - rows = await cls.db.fetch(query, mxid) - return [cls._from_row(row) for row in rows if row] - - @classmethod - async def delete_all_for_mxid(cls, mxid: id.UserID): - await cls.db.execute("DELETE FROM cookie WHERE mxid=$1", mxid) - - @classmethod - async def bulk_upsert(cls, mxid: id.UserID, cookies: dict[str, str]): - for name, value in cookies.items(): - cookie = cls(mxid, name, value) - await cookie.upsert() - - async def upsert(self): - query = """ - INSERT INTO cookie (mxid, name, value) - VALUES ($1, $2, $3) - ON CONFLICT (mxid, name) - DO UPDATE - SET value=excluded.value - """ - await self.db.execute(query, self.mxid, self.name, self.value) diff --git a/linkedin_matrix/db/http_header.py b/linkedin_matrix/db/http_header.py deleted file mode 100644 index dead523..0000000 --- a/linkedin_matrix/db/http_header.py +++ /dev/null @@ -1,54 +0,0 @@ -from __future__ import annotations - -from asyncpg import Record -from attr import dataclass - -from mautrix.types import UserID - -from .model_base import Model - - -@dataclass -class HttpHeader(Model): - mxid: UserID - name: str - value: str - - _table_name = "http_header" - _field_list = [ - "mxid", - "name", - "value", - ] - - @classmethod - def _from_row(cls, row: Record | None) -> HttpHeader | None: - if row is None: - return None - return cls(**row) - - @classmethod - async def get_for_mxid(cls, mxid: id.UserID) -> list[HttpHeader]: - query = HttpHeader.select_constructor("mxid=$1") - rows = await 
cls.db.fetch(query, mxid) - return [cls._from_row(row) for row in rows if row] - - @classmethod - async def delete_all_for_mxid(cls, mxid: id.UserID): - await cls.db.execute("DELETE FROM http_header WHERE mxid=$1", mxid) - - @classmethod - async def bulk_upsert(cls, mxid: id.UserID, http_headers: dict[str, str]): - for name, value in http_headers.items(): - http_header = cls(mxid, name, value) - await http_header.upsert() - - async def upsert(self): - query = """ - INSERT INTO http_header (mxid, name, value) - VALUES ($1, $2, $3) - ON CONFLICT (mxid, name) - DO UPDATE - SET value=excluded.value - """ - await self.db.execute(query, self.mxid, self.name, self.value) diff --git a/linkedin_matrix/db/message.py b/linkedin_matrix/db/message.py deleted file mode 100644 index 03307e3..0000000 --- a/linkedin_matrix/db/message.py +++ /dev/null @@ -1,173 +0,0 @@ -from __future__ import annotations - -from typing import cast -from datetime import datetime - -from asyncpg import Record -from attr import dataclass - -from linkedin_messaging import URN -from mautrix.types import EventID, RoomID -from mautrix.util.async_db import Scheme - -from .model_base import Model - - -@dataclass -class Message(Model): - mxid: EventID - mx_room: RoomID - li_message_urn: URN - li_thread_urn: URN - li_sender_urn: URN - li_receiver_urn: URN - index: int - timestamp: datetime - - _table_name = "message" - _field_list = [ - "mxid", - "mx_room", - "li_message_urn", - "li_thread_urn", - "li_sender_urn", - "li_receiver_urn", - "index", - "timestamp", - ] - - @classmethod - def _from_row(cls, row: Record | None) -> Message | None: - if row is None: - return None - data = {**row} - li_message_urn = data.pop("li_message_urn") - li_thread_urn = data.pop("li_thread_urn") - li_sender_urn = data.pop("li_sender_urn") - li_receiver_urn = data.pop("li_receiver_urn") - timestamp = data.pop("timestamp") - return cls( - **data, - li_message_urn=URN(li_message_urn), - li_thread_urn=URN(li_thread_urn), - 
li_sender_urn=URN(li_sender_urn), - li_receiver_urn=URN(li_receiver_urn), - timestamp=datetime.fromtimestamp(timestamp), - ) - - @classmethod - async def get_all_by_li_message_urn( - cls, - li_message_urn: URN, - li_receiver_urn: URN, - ) -> list["Message"]: - query = Message.select_constructor("li_message_urn=$1 AND li_receiver_urn=$2") - rows = await cls.db.fetch(query, li_message_urn.id_str(), li_receiver_urn.id_str()) - return [cast(Message, cls._from_row(row)) for row in rows if row] - - @classmethod - async def get_by_li_message_urn( - cls, - li_message_urn: URN, - li_receiver_urn: URN, - index: int = 0, - ) -> Message | None: - query = Message.select_constructor( - """ - li_message_urn=$1 AND li_receiver_urn=$2 AND "index"=$3 - """ - ) - row = await cls.db.fetchrow( - query, - li_message_urn.id_str(), - li_receiver_urn.id_str(), - index, - ) - return cls._from_row(row) - - @classmethod - async def delete_all_by_room(cls, room_id: RoomID): - await cls.db.execute("DELETE FROM message WHERE mx_room=$1", room_id) - - @classmethod - async def get_by_mxid(cls, mxid: EventID, mx_room: RoomID) -> Message | None: - query = Message.select_constructor("mxid=$1 AND mx_room=$2") - row = await cls.db.fetchrow(query, mxid, mx_room) - return cls._from_row(row) - - @classmethod - async def get_most_recent( - cls, - li_thread_urn: URN, - li_receiver_urn: URN, - ) -> Message | None: - query = ( - Message.select_constructor("li_thread_urn=$1 AND li_receiver_urn=$2") - + ' ORDER BY timestamp DESC, "index" DESC' - + " LIMIT 1" - ) - row = await cls.db.fetchrow(query, li_thread_urn.id_str(), li_receiver_urn.id_str()) - return cls._from_row(row) - - async def insert(self): - query = Message.insert_constructor() - await self.db.execute( - query, - self.mxid, - self.mx_room, - self.li_message_urn.id_str(), - self.li_thread_urn.id_str(), - self.li_sender_urn.id_str(), - self.li_receiver_urn.id_str(), - self.index, - self.timestamp.timestamp(), - ) - - @classmethod - async def 
bulk_create( - cls, - li_message_urn: URN, - li_thread_urn: URN, - li_sender_urn: URN, - li_receiver_urn: URN, - timestamp: datetime, - event_ids: list[EventID], - mx_room: RoomID, - ): - if not event_ids: - return - - records = [ - ( - mxid, - mx_room, - li_message_urn.id_str(), - li_thread_urn.id_str(), - li_sender_urn.id_str(), - li_receiver_urn.id_str(), - index, - timestamp.timestamp(), - ) - for index, mxid in enumerate(event_ids) - ] - async with cls.db.acquire() as conn, conn.transaction(): - if cls.db.scheme == Scheme.POSTGRES: - await conn.copy_records_to_table( - "message", records=records, columns=cls._field_list - ) - else: - await conn.executemany(Message.insert_constructor(), records) - - async def delete(self): - q = """ - DELETE FROM message - WHERE li_message_urn=$1 - AND li_receiver_urn=$2 - AND "index"=$3 - """ - await self.db.execute( - q, - self.li_message_urn.id_str(), - self.li_receiver_urn.id_str(), - self.index, - ) diff --git a/linkedin_matrix/db/model_base.py b/linkedin_matrix/db/model_base.py deleted file mode 100644 index e464147..0000000 --- a/linkedin_matrix/db/model_base.py +++ /dev/null @@ -1,34 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, ClassVar - -from mautrix.util.async_db import Database - -fake_db = Database("") if TYPE_CHECKING else None - - -class Model: - # Ignore type errors here since the variable will always be set in db/__init__.py. 
- db: ClassVar[Database] = fake_db # type: ignore - - _table_name: str - _field_list: list[str] - - @classmethod - def field_list_str(cls) -> str: - return ",".join(map(lambda f: f'"{f}"', cls._field_list)) - - @classmethod - def select_constructor(cls, where_clause: str | None = None) -> str: - query = f'SELECT {cls.field_list_str()} FROM "{cls._table_name}"' - if where_clause: - query += f" WHERE {where_clause}" - return query - - @classmethod - def insert_constructor(cls) -> str: - values_str = ",".join(f"${i+1}" for i in range(len(cls._field_list))) - return f""" - INSERT INTO "{cls._table_name}" ({cls.field_list_str()}) - VALUES ({values_str}) - """ diff --git a/linkedin_matrix/db/portal.py b/linkedin_matrix/db/portal.py deleted file mode 100644 index 401f52e..0000000 --- a/linkedin_matrix/db/portal.py +++ /dev/null @@ -1,158 +0,0 @@ -from __future__ import annotations - -from typing import cast - -from asyncpg import Record -from attr import dataclass - -from linkedin_messaging import URN -from mautrix.types import ContentURI, RoomID - -from .model_base import Model - - -@dataclass -class Portal(Model): - li_thread_urn: URN - li_receiver_urn: URN | None - li_is_group_chat: bool - li_other_user_urn: URN | None - - mxid: RoomID | None - encrypted: bool - - name: str | None - photo_id: str | None - avatar_url: ContentURI | None - topic: str | None - name_set: bool - avatar_set: bool - topic_set: bool - - _table_name = "portal" - _field_list = [ - # LinkedIn chat information - "li_thread_urn", - "li_receiver_urn", - "li_is_group_chat", - "li_other_user_urn", - # Matrix portal information - "mxid", - "encrypted", - # Chat metadata - "name", - "photo_id", - "avatar_url", - "topic", - "name_set", - "avatar_set", - "topic_set", - ] - - @classmethod - def _from_row(cls, row: Record | None) -> Portal | None: - if row is None: - return None - data = {**row} - li_thread_urn = data.pop("li_thread_urn") - li_receiver_urn = data.pop("li_receiver_urn", None) - 
li_other_user_urn = data.pop("li_other_user_urn", None) - return cls( - **data, - li_thread_urn=URN(li_thread_urn), - li_receiver_urn=URN(li_receiver_urn) if li_receiver_urn else None, - li_other_user_urn=URN(li_other_user_urn) if li_other_user_urn else None, - ) - - @classmethod - async def get_by_li_thread_urn( - cls, - li_thread_urn: URN, - li_receiver_urn: URN | None, - ) -> Portal | None: - query = Portal.select_constructor("li_thread_urn=$1 AND li_receiver_urn=$2") - row = await cls.db.fetchrow( - query, - li_thread_urn.id_str(), - li_receiver_urn.id_str() if li_receiver_urn else None, - ) - return cls._from_row(row) - - @classmethod - async def get_by_mxid(cls, mxid: RoomID) -> Portal | None: - query = Portal.select_constructor("mxid=$1") - row = await cls.db.fetchrow(query, mxid) - return cls._from_row(row) - - @classmethod - async def get_all_by_li_receiver_urn(cls, li_receiver_urn: URN) -> list["Portal"]: - query = Portal.select_constructor("li_receiver_urn=$1") - rows = await cls.db.fetch(query, li_receiver_urn.id_str()) - return [cast(Portal, cls._from_row(row)) for row in rows if row] - - @classmethod - async def all(cls) -> list["Portal"]: - query = Portal.select_constructor() - rows = await cls.db.fetch(query) - return [cast(Portal, cls._from_row(row)) for row in rows if row] - - async def insert(self): - query = Portal.insert_constructor() - await self.db.execute( - query, - self.li_thread_urn.id_str(), - self.li_receiver_urn.id_str() if self.li_receiver_urn else None, - self.li_is_group_chat, - self.li_other_user_urn.id_str() if self.li_other_user_urn else None, - self.mxid, - self.encrypted, - self.name, - self.photo_id, - self.avatar_url, - self.topic, - self.name_set, - self.avatar_set, - self.topic_set, - ) - - async def delete(self): - q = "DELETE FROM portal WHERE li_thread_urn=$1 AND li_receiver_urn=$2" - await self.db.execute( - q, - self.li_thread_urn.id_str(), - self.li_receiver_urn.id_str() if self.li_receiver_urn else None, - ) - - 
async def save(self): - query = """ - UPDATE portal - SET li_is_group_chat=$3, - li_other_user_urn=$4, - mxid=$5, - encrypted=$6, - name=$7, - photo_id=$8, - avatar_url=$9, - topic=$10, - name_set=$11, - avatar_set=$12, - topic_set=$13 - WHERE li_thread_urn=$1 - AND li_receiver_urn=$2 - """ - await self.db.execute( - query, - self.li_thread_urn.id_str(), - self.li_receiver_urn.id_str() if self.li_receiver_urn else None, - self.li_is_group_chat, - self.li_other_user_urn.id_str() if self.li_other_user_urn else None, - self.mxid, - self.encrypted, - self.name, - self.photo_id, - self.avatar_url, - self.topic, - self.name_set, - self.avatar_set, - self.topic_set, - ) diff --git a/linkedin_matrix/db/puppet.py b/linkedin_matrix/db/puppet.py deleted file mode 100644 index 1afc2ae..0000000 --- a/linkedin_matrix/db/puppet.py +++ /dev/null @@ -1,139 +0,0 @@ -from __future__ import annotations - -from typing import cast - -from asyncpg import Record -from attr import dataclass -from yarl import URL - -from linkedin_messaging import URN -from mautrix.types import ContentURI, SyncToken, UserID - -from .model_base import Model - - -@dataclass -class Puppet(Model): - li_member_urn: URN - name: str | None - photo_id: str | None - photo_mxc: ContentURI | None - - custom_mxid: UserID | None - access_token: str | None - next_batch: SyncToken | None - base_url: URL | None - - name_set: bool = False - avatar_set: bool = False - contact_info_set: bool = False - is_registered: bool = False - - _table_name = "puppet" - _field_list = [ - "li_member_urn", - "name", - "photo_id", - "photo_mxc", - "name_set", - "avatar_set", - "contact_info_set", - "is_registered", - "custom_mxid", - "access_token", - "next_batch", - "base_url", - ] - - @classmethod - def _from_row(cls, row: Record | None) -> Puppet | None: - if row is None: - return None - data = {**row} - base_url = data.pop("base_url", None) - li_member_urn = data.pop("li_member_urn") - return cls( - **data, - base_url=URL(base_url) if 
base_url else None, - li_member_urn=URN(li_member_urn), - ) - - @classmethod - async def get_by_li_member_urn(cls, li_member_urn: URN) -> Puppet | None: - query = Puppet.select_constructor("li_member_urn=$1") - row = await cls.db.fetchrow(query, li_member_urn.id_str()) - return cls._from_row(row) - - @classmethod - async def get_by_name(cls, name: str) -> Puppet | None: - query = Puppet.select_constructor("name=$1") - row = await cls.db.fetchrow(query, name) - return cls._from_row(row) - - @classmethod - async def get_by_custom_mxid(cls, mxid: UserID) -> Puppet | None: - query = Puppet.select_constructor("custom_mxid=$1") - row = await cls.db.fetchrow(query, mxid) - return cls._from_row(row) - - @classmethod - async def get_all_with_custom_mxid(cls) -> list["Puppet"]: - query = Puppet.select_constructor("custom_mxid <> ''") - rows = await cls.db.fetch(query) - return [cast(Puppet, cls._from_row(row)) for row in rows if row] - - async def insert(self): - query = Puppet.insert_constructor() - await self.db.execute( - query, - self.li_member_urn.id_str(), - self.name, - self.photo_id, - self.photo_mxc, - self.name_set, - self.avatar_set, - self.contact_info_set, - self.is_registered, - self.custom_mxid, - self.access_token, - self.next_batch, - str(self.base_url) if self.base_url else None, - ) - - async def delete(self): - await self.db.execute( - "DELETE FROM puppet WHERE li_member_urn=$1", - self.li_member_urn.id_str(), - ) - - async def save(self): - query = """ - UPDATE puppet - SET name=$2, - photo_id=$3, - photo_mxc=$4, - name_set=$5, - avatar_set=$6, - contact_info_set=$7, - is_registered=$8, - custom_mxid=$9, - access_token=$10, - next_batch=$11, - base_url=$12 - WHERE li_member_urn=$1 - """ - await self.db.execute( - query, - self.li_member_urn.id_str(), - self.name, - self.photo_id, - self.photo_mxc, - self.name_set, - self.avatar_set, - self.contact_info_set, - self.is_registered, - self.custom_mxid, - self.access_token, - self.next_batch, - 
str(self.base_url) if self.base_url else None, - ) diff --git a/linkedin_matrix/db/reaction.py b/linkedin_matrix/db/reaction.py deleted file mode 100644 index 02d3a37..0000000 --- a/linkedin_matrix/db/reaction.py +++ /dev/null @@ -1,133 +0,0 @@ -from __future__ import annotations - -from asyncpg import Record -from attr import dataclass - -from linkedin_messaging import URN -from mautrix.types import EventID, RoomID - -from .model_base import Model - - -@dataclass -class Reaction(Model): - mxid: EventID - mx_room: RoomID - li_message_urn: URN - li_receiver_urn: URN - li_sender_urn: URN - reaction: str - - _table_name = "reaction" - _field_list = [ - "mxid", - "mx_room", - "li_message_urn", - "li_receiver_urn", - "li_sender_urn", - "reaction", - ] - - @classmethod - def _from_row(cls, row: Record | None) -> Reaction | None: - if row is None: - return None - data = {**row} - li_message_urn = data.pop("li_message_urn") - li_receiver_urn = data.pop("li_receiver_urn") - li_sender_urn = data.pop("li_sender_urn") - return cls( - **data, - li_message_urn=URN(li_message_urn), - li_receiver_urn=URN(li_receiver_urn), - li_sender_urn=URN(li_sender_urn), - ) - - @classmethod - async def get_by_mxid(cls, mxid: EventID, mx_room: RoomID) -> Reaction | None: - query = Reaction.select_constructor("mxid=$1 AND mx_room=$2") - row = await cls.db.fetchrow(query, mxid, mx_room) - return cls._from_row(row) - - @classmethod - async def get_most_recent_by_li_message_urn( - cls, mx_room: RoomID, li_message_urn: URN - ) -> Reaction | None: - query = ( - Reaction.select_constructor("mx_room=$1 AND li_message_urn=$2") - + ' ORDER BY "index" DESC' - + " LIMIT 1" - ) - row = await cls.db.fetchrow(query, mx_room, li_message_urn.id_str()) - return cls._from_row(row) - - @classmethod - async def get_by_li_message_urn_and_emoji( - cls, - li_message_urn: URN, - li_receiver_urn: URN, - li_sender_urn: URN, - reaction: str, - ) -> Reaction | None: - query = Reaction.select_constructor( - """ - 
li_message_urn=$1 - AND li_receiver_urn=$2 - AND li_sender_urn=$3 - AND reaction=$4 - """ - ) - row = await cls.db.fetchrow( - query, - li_message_urn.id_str(), - li_receiver_urn.id_str(), - li_sender_urn.id_str(), - reaction, - ) - return cls._from_row(row) - - async def insert(self): - query = Reaction.insert_constructor() - await self.db.execute( - query, - self.mxid, - self.mx_room, - self.li_message_urn.id_str(), - self.li_receiver_urn.id_str(), - self.li_sender_urn.id_str(), - self.reaction, - ) - - async def delete(self): - query = """ - DELETE FROM reaction - WHERE li_message_urn=$1 - AND li_receiver_urn=$2 - AND li_sender_urn=$3 - """ - await self.db.execute( - query, - self.li_message_urn.id_str(), - self.li_receiver_urn.id_str(), - self.li_sender_urn.id_str(), - ) - - async def save(self): - query = """ - UPDATE reaction - SET mxid=$1, - mx_room=$2, - reaction=$3 - WHERE li_message_urn=$1 - AND li_receiver_urn=$2 - AND li_sender_urn=$3 - """ - await self.db.execute( - query, - self.mxid, - self.mx_room, - self.reaction, - self.li_message_urn.id_str(), - self.li_receiver_urn.id_str(), - self.li_sender_urn.id_str(), - ) diff --git a/linkedin_matrix/db/upgrade/__init__.py b/linkedin_matrix/db/upgrade/__init__.py deleted file mode 100644 index 2cee5df..0000000 --- a/linkedin_matrix/db/upgrade/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -from mautrix.util.async_db import UpgradeTable - -upgrade_table = UpgradeTable() - -from . 
import ( # noqa: E402 - v01_initial_revision, - v02_multiple_reaction_per_message, - v03_add_topic_to_portal, - v04_add_portal_meta_set, - v05_add_index_to_reaction, - v06_add_space_mxid_to_user, - v07_puppet_contact_info_set, - v08_splat_pickle_data, - v09_cookie_table, - v10_http_header_table, -) - -__all__ = ( - "v01_initial_revision", - "v02_multiple_reaction_per_message", - "v03_add_topic_to_portal", - "v04_add_portal_meta_set", - "v05_add_index_to_reaction", - "v06_add_space_mxid_to_user", - "v07_puppet_contact_info_set", - "v08_splat_pickle_data", - "v09_cookie_table", - "v10_http_header_table", -) diff --git a/linkedin_matrix/db/upgrade/v01_initial_revision.py b/linkedin_matrix/db/upgrade/v01_initial_revision.py deleted file mode 100644 index 0eda66e..0000000 --- a/linkedin_matrix/db/upgrade/v01_initial_revision.py +++ /dev/null @@ -1,108 +0,0 @@ -from mautrix.util.async_db import Connection - -from . import upgrade_table - - -@upgrade_table.register(description="Initial asyncpg revision", transaction=False) -async def upgrade_v1(conn: Connection): - create_table_queries = [ - """ - CREATE TABLE "user" ( - mxid TEXT PRIMARY KEY, - li_member_urn TEXT UNIQUE, - client_pickle BYTEA, - notice_room TEXT - ) - """, - """ - CREATE TABLE portal ( - li_thread_urn TEXT, - li_receiver_urn TEXT, - li_is_group_chat BOOLEAN NOT NULL DEFAULT false, - li_other_user_urn TEXT, - - mxid TEXT UNIQUE, - encrypted BOOLEAN NOT NULL DEFAULT false, - - name TEXT, - photo_id TEXT, - avatar_url TEXT, - - PRIMARY KEY (li_thread_urn, li_receiver_urn) - ) - """, - """ - CREATE TABLE puppet ( - li_member_urn TEXT PRIMARY KEY, - name TEXT, - photo_id TEXT, - photo_mxc TEXT, - - name_set BOOLEAN NOT NULL DEFAULT false, - avatar_set BOOLEAN NOT NULL DEFAULT false, - is_registered BOOLEAN NOT NULL DEFAULT false, - - custom_mxid TEXT, - access_token TEXT, - next_batch TEXT, - base_url TEXT - ) - """, - """ - CREATE TABLE message ( - mxid TEXT, - mx_room TEXT, - li_message_urn TEXT, - 
li_thread_urn TEXT, - li_sender_urn TEXT, - li_receiver_urn TEXT, - "index" SMALLINT, - timestamp FLOAT, - - PRIMARY KEY (li_message_urn, li_receiver_urn, "index"), - - FOREIGN KEY (li_thread_urn, li_receiver_urn) - REFERENCES portal (li_thread_urn, li_receiver_urn) - ON UPDATE CASCADE - ON DELETE CASCADE, - - UNIQUE (mxid, mx_room) - ) - """, - """ - CREATE TABLE reaction ( - mxid TEXT, - mx_room TEXT, - li_message_urn TEXT, - li_receiver_urn TEXT, - li_sender_urn TEXT, - reaction TEXT, - - PRIMARY KEY (li_message_urn, li_receiver_urn, li_sender_urn), - - UNIQUE (mxid, mx_room) - ) - """, - """ - CREATE TABLE user_portal ( - "user" TEXT, - portal TEXT, - portal_receiver TEXT, - - PRIMARY KEY ("user", portal, portal_receiver), - - FOREIGN KEY (portal, portal_receiver) - REFERENCES portal (li_thread_urn, li_receiver_urn) - ON UPDATE CASCADE - ON DELETE CASCADE, - - FOREIGN KEY ("user") - REFERENCES "user"(li_member_urn) - ON UPDATE CASCADE - ON DELETE CASCADE - ) - """, - ] - - for query in create_table_queries: - await conn.execute(query) diff --git a/linkedin_matrix/db/upgrade/v02_multiple_reaction_per_message.py b/linkedin_matrix/db/upgrade/v02_multiple_reaction_per_message.py deleted file mode 100644 index cbf039f..0000000 --- a/linkedin_matrix/db/upgrade/v02_multiple_reaction_per_message.py +++ /dev/null @@ -1,44 +0,0 @@ -from mautrix.util.async_db import Connection, Scheme - -from . 
import upgrade_table - - -@upgrade_table.register(description="Multiple reactions per message") -async def upgrade_v2(conn: Connection, scheme: Scheme): - if scheme != Scheme.SQLITE: - await conn.execute("ALTER TABLE reaction DROP CONSTRAINT reaction_pkey") - await conn.execute( - """ - ALTER TABLE reaction ADD PRIMARY KEY ( - li_message_urn, - li_receiver_urn, - li_sender_urn, - reaction - ) - """ - ) - else: - await conn.execute( - """ - CREATE TABLE reaction_v2 ( - mxid TEXT, - mx_room TEXT, - li_message_urn TEXT, - li_receiver_urn TEXT, - li_sender_urn TEXT, - reaction TEXT, - - PRIMARY KEY (li_message_urn, li_receiver_urn, li_sender_urn, reaction), - - UNIQUE (mxid, mx_room) - ) - """ - ) - await conn.execute( - """ - INSERT INTO reaction_v2 (mxid, mx_room, li_message_urn, li_receiver_urn, reaction) - SELECT mxid, mx_room, li_message_urn, li_receiver_urn, reaction FROM reaction - """ - ) - await conn.execute("DROP TABLE reaction") - await conn.execute("ALTER TABLE reaction_v2 RENAME TO reaction") diff --git a/linkedin_matrix/db/upgrade/v03_add_topic_to_portal.py b/linkedin_matrix/db/upgrade/v03_add_topic_to_portal.py deleted file mode 100644 index e2485bd..0000000 --- a/linkedin_matrix/db/upgrade/v03_add_topic_to_portal.py +++ /dev/null @@ -1,8 +0,0 @@ -from mautrix.util.async_db import Connection - -from . import upgrade_table - - -@upgrade_table.register(description="Add topic to portals") -async def upgrade_v3(conn: Connection): - await conn.execute("ALTER TABLE portal ADD COLUMN topic TEXT") diff --git a/linkedin_matrix/db/upgrade/v04_add_portal_meta_set.py b/linkedin_matrix/db/upgrade/v04_add_portal_meta_set.py deleted file mode 100644 index 1029c14..0000000 --- a/linkedin_matrix/db/upgrade/v04_add_portal_meta_set.py +++ /dev/null @@ -1,15 +0,0 @@ -from mautrix.util.async_db import Connection - -from . 
import upgrade_table - - -@upgrade_table.register(description="Add name_set, avatar_set, and topic_set to portals") -async def upgrade_v4(conn: Connection): - await conn.execute("ALTER TABLE portal ADD COLUMN name_set BOOLEAN NOT NULL DEFAULT false") - await conn.execute("ALTER TABLE portal ADD COLUMN avatar_set BOOLEAN NOT NULL DEFAULT false") - await conn.execute("ALTER TABLE portal ADD COLUMN topic_set BOOLEAN NOT NULL DEFAULT false") - await conn.execute("UPDATE portal SET name_set=true WHERE name<>''") - # We don't set avatar_set to true because there was a bug that caused avatars to - # be set incorrectly, so we want everything to be reset. - # We also don't set topic_set to true because none of the topics have been - # stored in the database due to a bug. diff --git a/linkedin_matrix/db/upgrade/v05_add_index_to_reaction.py b/linkedin_matrix/db/upgrade/v05_add_index_to_reaction.py deleted file mode 100644 index f44e996..0000000 --- a/linkedin_matrix/db/upgrade/v05_add_index_to_reaction.py +++ /dev/null @@ -1,55 +0,0 @@ -from mautrix.util.async_db import Connection, Scheme - -from . 
import upgrade_table - - -@upgrade_table.register(description="Add index to reaction table") -async def upgrade_v5(conn: Connection, scheme: Scheme): - if scheme != Scheme.SQLITE: - await conn.execute('ALTER TABLE reaction ADD COLUMN "index" INTEGER') - - # Give everything an index based on row number - await conn.execute( - """ - UPDATE reaction - SET "index" = t.rownum - FROM (SELECT mxid, mx_room, ROW_NUMBER() OVER () AS rownum - FROM reaction) t - WHERE reaction.mxid = t.mxid - AND reaction.mx_room = t.mx_room - """ - ) - - # Make the new column SERIAL - await conn.execute('ALTER TABLE reaction ALTER COLUMN "index" SET NOT NULL') - await conn.execute( - """ - ALTER TABLE reaction ALTER COLUMN "index" ADD GENERATED BY DEFAULT AS IDENTITY - """ - ) - else: - await conn.execute( - """ - CREATE TABLE reaction_v5 ( - rowid INTEGER PRIMARY KEY AUTOINCREMENT, - mxid TEXT, - mx_room TEXT, - li_message_urn TEXT, - li_receiver_urn TEXT, - li_sender_urn TEXT, - reaction TEXT, - - UNIQUE (mxid, mx_room) - ) - """ - ) - await conn.execute( - """ - INSERT INTO reaction_v5 ( - rowid, mxid, mx_room, li_message_urn, li_receiver_urn, reaction - ) - SELECT rowid, mxid, mx_room, li_message_urn, li_receiver_urn, reaction FROM reaction - """ - ) - await conn.execute("DROP TABLE reaction") - await conn.execute("ALTER TABLE reaction_v5 RENAME TO reaction") diff --git a/linkedin_matrix/db/upgrade/v06_add_space_mxid_to_user.py b/linkedin_matrix/db/upgrade/v06_add_space_mxid_to_user.py deleted file mode 100644 index 9d1898a..0000000 --- a/linkedin_matrix/db/upgrade/v06_add_space_mxid_to_user.py +++ /dev/null @@ -1,8 +0,0 @@ -from mautrix.util.async_db import Connection - -from . 
import upgrade_table - - -@upgrade_table.register(description="Add space MXID to User") -async def upgrade_v6(conn: Connection): - await conn.execute('ALTER TABLE "user" ADD COLUMN space_mxid TEXT') diff --git a/linkedin_matrix/db/upgrade/v07_puppet_contact_info_set.py b/linkedin_matrix/db/upgrade/v07_puppet_contact_info_set.py deleted file mode 100644 index 7e9592c..0000000 --- a/linkedin_matrix/db/upgrade/v07_puppet_contact_info_set.py +++ /dev/null @@ -1,10 +0,0 @@ -from mautrix.util.async_db import Connection - -from . import upgrade_table - - -@upgrade_table.register(description="Add contact_info_set column to puppet table") -async def upgrade_v7(conn: Connection): - await conn.execute( - "ALTER TABLE puppet ADD COLUMN contact_info_set BOOLEAN NOT NULL DEFAULT false" - ) diff --git a/linkedin_matrix/db/upgrade/v08_splat_pickle_data.py b/linkedin_matrix/db/upgrade/v08_splat_pickle_data.py deleted file mode 100644 index f257dc6..0000000 --- a/linkedin_matrix/db/upgrade/v08_splat_pickle_data.py +++ /dev/null @@ -1,42 +0,0 @@ -import logging -import pickle - -from mautrix.util.async_db import Connection - -from . import upgrade_table - - -@upgrade_table.register( - description="Add credential columns to user table and populate them from client_pickle" -) -async def upgrade_v8(conn: Connection): - # First, add the columns for JSESSIONID and li_at. - await conn.execute('ALTER TABLE "user" ADD COLUMN jsessionid TEXT') - await conn.execute('ALTER TABLE "user" ADD COLUMN li_at TEXT') - - # Now, unpickle the data from client_pickle and put it into the new columns. 
- for row in await conn.fetch('SELECT mxid, client_pickle FROM "user"'): - user_id = row["mxid"] - client_pickle = row["client_pickle"] - if client_pickle is None: - logging.warning(f"User {user_id} has no client_pickle") - continue - - cookies = pickle.loads(client_pickle) - jsessionid, li_at = None, None - for cookies in cookies.values(): - if j := cookies.get("JSESSIONID"): - jsessionid = j.value - if li := cookies.get("li_at"): - li_at = li.value - - if not jsessionid or not li_at: - logging.warning(f"User {user_id} doesn't have JSESSIONID or li_at") - continue - - await conn.execute( - 'UPDATE "user" SET jsessionid = $1, li_at = $2 WHERE mxid = $3', - jsessionid, - li_at, - user_id, - ) diff --git a/linkedin_matrix/db/upgrade/v09_cookie_table.py b/linkedin_matrix/db/upgrade/v09_cookie_table.py deleted file mode 100644 index 20c6d8b..0000000 --- a/linkedin_matrix/db/upgrade/v09_cookie_table.py +++ /dev/null @@ -1,36 +0,0 @@ -from mautrix.util.async_db import Connection - -from . import upgrade_table - - -@upgrade_table.register(description="Add a cookie table for storing all of the cookies") -async def upgrade_v9(conn: Connection): - await conn.execute( - """ - CREATE TABLE cookie ( - mxid TEXT, - name TEXT, - value TEXT, - - PRIMARY KEY (mxid, name) - ) - """ - ) - - for row in await conn.fetch('SELECT mxid, jsessionid, li_at FROM "user"'): - mxid = row["mxid"] - jsessionid = row["jsessionid"] - li_at = row["li_at"] - - if jsessionid: - await conn.execute( - "INSERT INTO cookie (mxid, name, value) VALUES ($1, 'JSESSIONID', $2)", - mxid, - jsessionid, - ) - if li_at: - await conn.execute( - "INSERT INTO cookie (mxid, name, value) VALUES ($1, 'li_at', $2)", - mxid, - li_at, - ) diff --git a/linkedin_matrix/db/upgrade/v10_http_header_table.py b/linkedin_matrix/db/upgrade/v10_http_header_table.py deleted file mode 100644 index 4447fc2..0000000 --- a/linkedin_matrix/db/upgrade/v10_http_header_table.py +++ /dev/null @@ -1,18 +0,0 @@ -from mautrix.util.async_db 
import Connection - -from . import upgrade_table - - -@upgrade_table.register(description="Add a header table for storing all of the headers") -async def upgrade_v10(conn: Connection): - await conn.execute( - """ - CREATE TABLE http_header ( - mxid TEXT, - name TEXT, - value TEXT, - - PRIMARY KEY (mxid, name) - ) - """ - ) diff --git a/linkedin_matrix/db/user.py b/linkedin_matrix/db/user.py deleted file mode 100644 index 62b1367..0000000 --- a/linkedin_matrix/db/user.py +++ /dev/null @@ -1,85 +0,0 @@ -from __future__ import annotations - -from typing import cast - -from asyncpg import Record -from attr import dataclass - -from linkedin_messaging import URN -from mautrix.types import RoomID, UserID - -from .model_base import Model - - -@dataclass -class User(Model): - mxid: UserID - li_member_urn: URN | None - notice_room: RoomID | None - space_mxid: RoomID | None - - _table_name = "user" - _field_list = [ - "mxid", - "li_member_urn", - "notice_room", - "space_mxid", - ] - - @classmethod - def _from_row(cls, row: Record | None) -> User | None: - if row is None: - return None - data = {**row} - li_member_urn = data.pop("li_member_urn") - return cls( - li_member_urn=URN(li_member_urn) if li_member_urn else None, - **data, - ) - - @classmethod - async def all_logged_in(cls) -> list["User"]: - query = User.select_constructor("li_member_urn <> ''") - rows = await cls.db.fetch(query) - return [cast(User, cls._from_row(row)) for row in rows if row] - - @classmethod - async def get_by_li_member_urn(cls, li_member_urn: URN) -> User | None: - query = User.select_constructor("li_member_urn=$1") - row = await cls.db.fetchrow(query, li_member_urn.id_str()) - return cls._from_row(row) - - @classmethod - async def get_by_mxid(cls, mxid: UserID) -> User | None: - query = User.select_constructor("mxid=$1") - row = await cls.db.fetchrow(query, mxid) - return cls._from_row(row) - - async def insert(self): - query = User.insert_constructor() - await self.db.execute( - query, - 
self.mxid, - self.li_member_urn.id_str() if self.li_member_urn else None, - self.notice_room, - self.space_mxid, - ) - - async def delete(self): - await self.db.execute('DELETE FROM "user" WHERE mxid=$1', self.mxid) - - async def save(self): - query = """ - UPDATE "user" - SET li_member_urn=$2, - notice_room=$3, - space_mxid=$4 - WHERE mxid=$1 - """ - await self.db.execute( - query, - self.mxid, - self.li_member_urn.id_str() if self.li_member_urn else None, - self.notice_room, - self.space_mxid, - ) diff --git a/linkedin_matrix/db/user_portal.py b/linkedin_matrix/db/user_portal.py deleted file mode 100644 index b0c83e7..0000000 --- a/linkedin_matrix/db/user_portal.py +++ /dev/null @@ -1,56 +0,0 @@ -from __future__ import annotations - -from asyncpg import Record -from attr import dataclass - -from .model_base import Model - - -@dataclass -class UserPortal(Model): - user: str - portal: str - portal_receiver: str - - _table_name = "user_portal" - _field_list = ["user", "portal", "portal_receiver"] - - @classmethod - def _from_row(cls, row: Record | None) -> UserPortal | None: - if row is None: - return None - return cls(**row) - - @classmethod - async def all(cls, user: str) -> dict[str, "UserPortal"]: - query = UserPortal.select_constructor('"user"=$1') - rows = await cls.db.fetch(query, user) - return {up.portal: up for up in (cls._from_row(row) for row in rows) if up} - - @classmethod - async def get( - cls, - user: str, - portal: str, - portal_receiver: str, - ) -> UserPortal | None: - query = UserPortal.select_constructor('"user"=$1 AND portal=$2 AND portal_receiver=$3') - row = await cls.db.fetchrow(query, user, portal, portal_receiver) - return cls._from_row(row) - - async def insert(self): - query = UserPortal.insert_constructor() - await self.db.execute(query, self.user, self.portal, self.portal_receiver) - - async def delete(self): - query = """ - DELETE FROM user_portal - WHERE "user"=$1 - AND portal=$2 - AND portal_receiver=$3 - """ - await 
self.db.execute(query, self.user, self.portal, self.portal_receiver) - - @classmethod - async def delete_all(cls, user: int): - await cls.db.execute('DELETE FROM user_portal WHERE "user"=$1', user) diff --git a/linkedin_matrix/example-config.yaml b/linkedin_matrix/example-config.yaml deleted file mode 100644 index 983626a..0000000 --- a/linkedin_matrix/example-config.yaml +++ /dev/null @@ -1,351 +0,0 @@ -# Homeserver details -homeserver: - # The address that this appservice can use to connect to the homeserver. - address: https://example.com - # The domain of the homeserver (for MXIDs, etc). - domain: example.com - # Whether or not to verify the SSL certificate of the homeserver. - # Only applies if address starts with https:// - verify_ssl: true - # What software is the homeserver running? - # Standard Matrix homeservers like Synapse, Dendrite and Conduit should just use "standard" here. - software: standard - # Number of retries for all HTTP requests if the homeserver isn't reachable. - http_retry_count: 4 - # The URL to push real-time bridge status to. - # If set, the bridge will make POST requests to this URL whenever a user's Signal connection state changes. - # The bridge will use the appservice as_token to authorize requests. - status_endpoint: null - # Endpoint for reporting per-message status. - message_send_checkpoint_endpoint: null - # Whether asynchronous uploads via MSC2246 should be enabled for media. - # Requires a media repo that supports MSC2246. - async_media: false - -# Application service host/registration related details -# Changing these values requires regeneration of the registration. -appservice: - # The address that the homeserver can use to connect to this appservice. - address: http://localhost:29319 - - # The hostname and port where this appservice should listen. 
- hostname: localhost - port: 29319 - # The maximum body size of appservice API requests (from the homeserver) in mebibytes - # Usually 1 is enough, but on high-traffic bridges you might need to increase this to avoid 413s - max_body_size: 1 - - # The full URI to the database. SQLite and Postgres are supported. - # Format examples: - # SQLite: sqlite:filename.db - # Postgres: postgres://username:password@hostname/dbname - database: postgres://username:password@hostname/db - # Additional arguments for asyncpg.create_pool() or sqlite3.connect() - # https://magicstack.github.io/asyncpg/current/api/index.html#asyncpg.pool.create_pool - # https://docs.python.org/3/library/sqlite3.html#sqlite3.connect - # For sqlite, min_size is used as the connection thread pool size and max_size is ignored. - # Additionally, SQLite supports init_commands as an array of SQL queries to run on connect (e.g. to set PRAGMAs). - database_opts: - min_size: 1 - max_size: 10 - - # Provisioning API part of the web server for automated portal creation and fetching information. - # Used by things like mautrix-manager (https://github.com/tulir/mautrix-manager). - provisioning: - # Whether or not the provisioning API should be enabled. - enabled: true - # The prefix to use in the provisioning API endpoints. - prefix: /_matrix/provision/v1 - # The shared secret to authorize users of the API. - # Set to "generate" to generate and save a new token. - shared_secret: generate - - # The unique ID of this appservice. - id: linkedin - # Username of the appservice bot. - bot_username: linkedinbot - # Display name and avatar for bot. Set to "remove" to remove display name/avatar, leave empty - # to leave display name/avatar as-is. - bot_displayname: LinkedIn bridge bot - bot_avatar: mxc://nevarro.space/cwsWnmeMpWSMZLUNblJHaIvP - - # Whether or not to receive ephemeral events via appservice transactions. - # Requires MSC2409 support (i.e. Synapse 1.22+). 
- # You should disable bridge -> sync_with_custom_puppets when this is enabled. - ephemeral_events: false - - # Authentication tokens for AS <-> HS communication. Autogenerated; do not modify. - as_token: "This value is generated when generating the registration" - hs_token: "This value is generated when generating the registration" - -# Segment-compatible analytics endpoint for tracking some events, like provisioning API login and encryption errors. -analytics: - # Hostname of the tracking server. The path is hardcoded to /v1/track - host: api.segment.io - # API key to send with tracking requests. Tracking is disabled if this is null. - token: null - # Optional user ID for tracking events. If null, defaults to using Matrix user ID. - user_id: null - -# Prometheus telemetry config. Requires prometheus-client to be installed. -metrics: - enabled: false - listen_port: 8000 - -# Manhole config. -manhole: - # Whether or not opening the manhole is allowed. - enabled: false - # The path for the unix socket. - path: /var/tmp/linkedin-matrix.manhole - # The list of UIDs who can be added to the whitelist. - # If empty, any UIDs can be specified in the open-manhole command. - whitelist: - - 0 - -# Bridge config -bridge: - # Localpart template of MXIDs for LinkedIn users. - # {userid} is replaced with the user ID of the LinkedIn user. - username_template: "linkedin_{userid}" - # Settings for creating a space for every user. - space_support: - # Whether or not to enable creating a space per user and inviting the - # user (as well as all of the puppets) to that space. - enable: false - # The name of the space - name: "LinkedIn" - # Displayname template for LinkedIn users. - # {displayname} is replaced with the display name of the LinkedIn user - # as defined below in displayname_preference. - # Keys available for displayname_preference are also available here. 
- displayname_template: "{displayname} (LinkedIn)" - # Available keys: - # "name" (full name) - # "first_name" - # "last_name" - displayname_preference: - - name - - first_name - - # Whether or not to set the topic on DMs to the user's occupation and a - # link to their profile. - set_topic_on_dms: true - - # The prefix for commands. Only required in non-management rooms. - command_prefix: "!li" - - # Number of chats to sync (and create portals for) on startup/login. - # Set 0 to disable automatic syncing. - initial_chat_sync: 20 - # Whether or not the LinkedIn users of logged in Matrix users should be - # invited to private chats when the user sends a message from another client. - invite_own_puppet_to_pm: false - # Whether or not to use /sync to get presence, read receipts and typing notifications - # when double puppeting is enabled - sync_with_custom_puppets: true - # Whether or not to update the m.direct account data event when double puppeting is enabled. - # Note that updating the m.direct event is not atomic (except with mautrix-asmux) - # and is therefore prone to race conditions. - sync_direct_chat_list: false - # Servers to always allow double puppeting from - double_puppet_server_map: - example.com: https://example.com - # Allow using double puppeting from any server with a valid client .well-known file. - double_puppet_allow_discovery: false - # Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth - # - # If set, custom puppets will be enabled automatically for local users - # instead of users having to find an access token and run `login-matrix` - # manually. - # If using this for other servers than the bridge's server, - # you must also set the URL in the double_puppet_server_map. - login_shared_secret_map: - example.com: foobar - # Whether or not to bridge presence in both directions. LinkedIn allows users not to broadcast - # presence, but then it won't send other users' presence to the client. 
- presence: true - # Whether or not to update avatars when syncing all contacts at startup. - update_avatar_initial_sync: true - # Whether or not created rooms should have federation enabled. - # If false, created portal rooms will never be federated. - federate_rooms: true - # Whether to explicitly set the avatar and room name for private chat portal rooms. - # If set to `default`, this will be enabled in encrypted rooms and disabled in unencrypted rooms. - # If set to `always`, all DM rooms will have explicit names and avatars set. - # If set to `never`, DM rooms will never have names and avatars set. - private_chat_portal_meta: default - - # End-to-bridge encryption support options. - # - # See https://docs.mau.fi/bridges/general/end-to-bridge-encryption.html for more info. - encryption: - # Allow encryption, work in group chat rooms with e2ee enabled - allow: false - # Default to encryption, force-enable encryption in all portals the bridge creates - # This will cause the bridge bot to be in private chats for the encryption to work properly. - default: false - # Whether to use MSC2409/MSC3202 instead of /sync long polling for receiving encryption-related data. - appservice: false - # Require encryption, drop any unencrypted messages. - require: false - # Enable key sharing? If enabled, key requests for rooms where users are in will be fulfilled. - # You must use a client that supports requesting keys from other users to use this feature. - allow_key_sharing: false - # Options for deleting megolm sessions from the bridge. - delete_keys: - # Beeper-specific: delete outbound sessions when hungryserv confirms - # that the user has uploaded the key to key backup. - delete_outbound_on_ack: false - # Don't store outbound sessions in the inbound table. - dont_store_outbound: false - # Ratchet megolm sessions forward after decrypting messages. - ratchet_on_decrypt: false - # Delete fully used keys (index >= max_messages) after decrypting messages. 
- delete_fully_used_on_decrypt: false - # Delete previous megolm sessions from same device when receiving a new one. - delete_prev_on_new_session: false - # Delete megolm sessions received from a device when the device is deleted. - delete_on_device_delete: false - # Periodically delete megolm sessions when 2x max_age has passed since receiving the session. - periodically_delete_expired: false - # Delete inbound megolm sessions that don't have the received_at field used for - # automatic ratcheting and expired session deletion. This is meant as a migration - # to delete old keys prior to the bridge update. - delete_outdated_inbound: false - # What level of device verification should be required from users? - # - # Valid levels: - # unverified - Send keys to all device in the room. - # cross-signed-untrusted - Require valid cross-signing, but trust all cross-signing keys. - # cross-signed-tofu - Require valid cross-signing, trust cross-signing keys on first use (and reject changes). - # cross-signed-verified - Require valid cross-signing, plus a valid user signature from the bridge bot. - # Note that creating user signatures from the bridge bot is not currently possible. - # verified - Require manual per-device verification - # (currently only possible by modifying the `trust` column in the `crypto_device` database table). - verification_levels: - # Minimum level for which the bridge should send keys to when bridging messages from Telegram to Matrix. - receive: unverified - # Minimum level that the bridge should accept for incoming Matrix messages. - send: unverified - # Minimum level that the bridge should require for accepting key requests. - share: cross-signed-tofu - # Options for Megolm room key rotation. These options allow you to - # configure the m.room.encryption event content. See: - # https://spec.matrix.org/v1.3/client-server-api/#mroomencryption for - # more information about that event. - rotation: - # Enable custom Megolm room key rotation settings. 
Note that these - # settings will only apply to rooms created after this option is - # set. - enable_custom: false - # The maximum number of milliseconds a session should be used - # before changing it. The Matrix spec recommends 604800000 (a week) - # as the default. - milliseconds: 604800000 - # The maximum number of messages that should be sent with a given a - # session before changing it. The Matrix spec recommends 100 as the - # default. - messages: 100 - - # Disable rotating keys when a user's devices change? - # You should not enable this option unless you understand all the implications. - disable_device_change_key_rotation: false - - # Whether or not the bridge should send a read receipt from the bridge bot when a message has - # been sent to LinkedIn. - delivery_receipts: false - # Whether to allow inviting arbitrary mxids to portal rooms - allow_invites: false - # Settings for backfilling messages from LinkedIn. - backfill: - # Whether or not the LinkedIn users of logged in Matrix users should be - # invited to private chats when backfilling history from LinkedIn. This is - # usually needed to prevent rate limits and to allow timestamp massaging. - invite_own_puppet: true - # Maximum number of messages to backfill initially. - # Set to 0 to disable backfilling when creating portal. - initial_limit: 0 - # Maximum number of messages to backfill if messages were missed while - # the bridge was disconnected. - # Set to 0 to disable backfilling missed messages. - missed_limit: 1000 - # If using double puppeting, should notifications be disabled - # while the initial backfill is in progress? - disable_notifications: false - # If this value is greater than 0, then (on backfill) if the - # conversation's last message was more than this number of hours ago, - # then the conversation will automatically be marked it as read. - unread_hours_threshold: 0 - periodic_reconnect: - # TODO needed? - # Interval in seconds in which to automatically reconnect all users. 
- # This can be used to automatically mitigate the bug where Linkedin stops sending messages. - # Set to -1 to disable periodic reconnections entirely. - interval: -1 - # What to do in periodic reconnects. Either "refresh" or "reconnect" - mode: refresh - # Should even disconnected users be reconnected? - always: false - # The number of seconds that a disconnection can last without triggering an automatic re-sync - # and missed message backfilling when reconnecting. - # Set to 0 to always re-sync, or -1 to never re-sync automatically. - resync_max_disconnected_time: 5 - # Whether or not temporary disconnections should send notices to the notice room. - # If this is false, disconnections will never send messages and connections will only send - # messages if it was disconnected for more than resync_max_disconnected_time seconds. - temporary_disconnect_notices: true - # Whether or not the bridge should try to "refresh" the connection if a normal reconnection - # attempt fails. - refresh_on_reconnection_fail: false - # Set this to true to tell the bridge to re-send m.bridge events to all rooms on the next run. - # This field will automatically be changed back to false after it, - # except if the config file is not writable. - resend_bridge_info: false - # When using double puppeting, should muted chats be muted in Matrix? - mute_bridging: false - # Whether or not mute status and tags should only be bridged when the portal room is created. - tag_only_on_create: true - - # Permissions for using the bridge. - # Permitted values: - # user - Use the bridge with puppeting. - # admin - Use and administrate the bridge. - # Permitted keys: - # * - All Matrix users - # domain - All users on that homeserver - # mxid - Specific user - permissions: - "example.com": "user" - "@admin:example.com": "admin" - -# Python logging configuration. 
-# -# See section 16.7.2 of the Python documentation for more info: -# https://docs.python.org/3.6/library/logging.config.html#configuration-dictionary-schema -logging: - version: 1 - formatters: - colored: - (): mautrix.util.logging.color.ColorFormatter - format: "[%(asctime)s] [%(levelname)s@%(name)s] %(message)s" - normal: - format: "[%(asctime)s] [%(levelname)s@%(name)s] %(message)s" - handlers: - file: - class: logging.handlers.RotatingFileHandler - formatter: normal - filename: ./linkedin-matrix.log - maxBytes: 10485760 - backupCount: 10 - console: - class: logging.StreamHandler - formatter: colored - loggers: - mau: - level: DEBUG - aiohttp: - level: INFO - root: - level: DEBUG - handlers: [file, console] diff --git a/linkedin_matrix/formatter/__init__.py b/linkedin_matrix/formatter/__init__.py deleted file mode 100644 index 6c12022..0000000 --- a/linkedin_matrix/formatter/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -from .from_linkedin import ( - linkedin_spinmail_to_matrix, - linkedin_subject_to_matrix, - linkedin_to_matrix, -) -from .from_matrix import matrix_to_linkedin - -__all__ = ( - "linkedin_spinmail_to_matrix", - "linkedin_subject_to_matrix", - "linkedin_to_matrix", - "matrix_to_linkedin", -) diff --git a/linkedin_matrix/formatter/from_linkedin.py b/linkedin_matrix/formatter/from_linkedin.py deleted file mode 100644 index 576b4d1..0000000 --- a/linkedin_matrix/formatter/from_linkedin.py +++ /dev/null @@ -1,109 +0,0 @@ -from __future__ import annotations - -from html import escape - -from bs4 import BeautifulSoup - -from linkedin_messaging import URN -from linkedin_messaging.api_objects import AttributedBody, SpInmailContent -from mautrix.types import Format, MessageType, TextMessageEventContent - -from .. 
import puppet as pu, user as u - - -def linkedin_subject_to_matrix(subject: str) -> TextMessageEventContent: - body = f"Subject: {subject}" - return TextMessageEventContent( - msgtype=MessageType.TEXT, - body=body, - format=Format.HTML, - formatted_body=f"{body}", - ) - - -async def linkedin_to_matrix(msg: AttributedBody) -> TextMessageEventContent: - content = TextMessageEventContent(msgtype=MessageType.TEXT, body=msg.text) - - segments: list[str | tuple[str, URN]] = [] - profile_urns = [] - - text = msg.text - for m in sorted(msg.attributes, key=lambda a: a.start, reverse=True): - if ( - m.start is None - or m.length is None - or not m.type_ - or not m.type_.text_entity - or not m.type_.text_entity.urn - ): - continue - - text, original, after = ( - text[: m.start], - text[m.start : m.start + m.length], - text[m.start + m.length :], - ) - segments.append(after) - segments.append((original, m.type_.text_entity.urn)) - profile_urns.append(m.type_.text_entity.urn) - - segments.append(text) - - mention_user_map = {} - for profile_urn in profile_urns: - user = await u.User.get_by_li_member_urn(profile_urn) - if user: - mention_user_map[profile_urn] = user.mxid - else: - puppet = await pu.Puppet.get_by_li_member_urn(profile_urn, create=False) - if puppet: - mention_user_map[profile_urn] = puppet.mxid - - html = "" - for segment in reversed(segments): - if isinstance(segment, tuple): - text, profile_urn = segment - mxid = mention_user_map.get(profile_urn) - if not text.startswith("@"): - text = "@" + text - - if not mxid: - html += text - else: - html += f'{text}' - else: - html += escape(segment) - - html = html.replace("\n", "
") - - if html != escape(content.body).replace("\n", "
"): - content.format = Format.HTML - content.formatted_body = html - - return content - - -async def linkedin_spinmail_to_matrix( - sp_inmail_content: SpInmailContent, -) -> TextMessageEventContent: - label, body = sp_inmail_content.advertiser_label, sp_inmail_content.body - html_message = f"""{label}{body}""" - if sp_inmail_content.sub_content and sp_inmail_content.sub_content.standard: - action, action_text = ( - sp_inmail_content.sub_content.standard.action, - sp_inmail_content.sub_content.standard.action_text, - ) - html_message += f'

{action_text}

' - - if sp_inmail_content.legal_text: - html_message += "
Legal text" - html_message += sp_inmail_content.legal_text.static_legal_text - html_message += sp_inmail_content.legal_text.custom_legal_text - html_message += "
" - - return TextMessageEventContent( - msgtype=MessageType.TEXT, - body=BeautifulSoup(html_message).text, - format=Format.HTML, - formatted_body=html_message, - ) diff --git a/linkedin_matrix/formatter/from_matrix.py b/linkedin_matrix/formatter/from_matrix.py deleted file mode 100644 index 46a37e3..0000000 --- a/linkedin_matrix/formatter/from_matrix.py +++ /dev/null @@ -1,132 +0,0 @@ -from typing import Any, cast - -from linkedin_messaging import URN -from linkedin_messaging.api_objects import ( - Attribute, - AttributedBody, - AttributeType, - MessageCreate, - TextEntity, -) -from mautrix.appservice import IntentAPI -from mautrix.types import Format, MessageType, TextMessageEventContent -from mautrix.util.formatter import ( - EntityString, - EntityType, - MarkdownString, - MatrixParser as BaseMatrixParser, - SimpleEntity, -) -from mautrix.util.logging import TraceLogger - -from .. import puppet as pu, user as u - - -class LinkedInFormatString(EntityString[SimpleEntity, EntityType], MarkdownString): - def format(self, entity_type: EntityType, **kwargs: Any) -> "LinkedInFormatString": - prefix = suffix = "" - - if entity_type == EntityType.USER_MENTION: - self.entities.append( - SimpleEntity( - type=entity_type, - offset=0, - length=len(self.text), - extra_info={"user_id": kwargs["user_id"]}, - ) - ) - return self - elif entity_type == EntityType.URL: - if kwargs["url"] != self.text: - suffix = f" ({kwargs['url']})" - elif entity_type == EntityType.PREFORMATTED: - prefix = "```\n" - suffix = "```" - elif entity_type == EntityType.INLINE_CODE: - prefix = suffix = "`" - elif entity_type == EntityType.BLOCKQUOTE: - children = self.trim().split("\n") - children = [child.prepend("> ") for child in children] - return self.join(children, "\n") - else: - return self - - self._offset_entities(len(prefix)) - self.text = f"{prefix}{self.text}{suffix}" - return self - - -class MatrixParser(BaseMatrixParser[LinkedInFormatString]): - fs = LinkedInFormatString - - async def 
parse(cls, data: str) -> LinkedInFormatString: - return cast(LinkedInFormatString, await super().parse(data)) - - -async def matrix_to_linkedin( - content: TextMessageEventContent, - sender: "u.User", - intent: IntentAPI, - log: TraceLogger, -) -> MessageCreate: - assert sender.li_member_urn - - attributes = [] - - if content.format == Format.HTML and content.formatted_body: - parsed = await MatrixParser().parse(content.formatted_body) - - if content.msgtype == MessageType.EMOTE: - display_name = await intent.get_displayname(sender.mxid) - if display_name: - parsed.prepend(f"* {display_name} ") - attributes.append( - Attribute( - 2, - len(display_name), - AttributeType(TextEntity(sender.li_member_urn)), - ) - ) - else: - log.warning(f"Couldn't find displayname for {sender.mxid}") - - text = parsed.text - - for mention in parsed.entities: - mxid = mention.extra_info["user_id"] - user = await u.User.get_by_mxid(mxid, create=False) - li_member_urn: URN - if user and user.li_member_urn: - li_member_urn = user.li_member_urn - else: - puppet = await pu.Puppet.get_by_mxid(mxid, create=False) - if puppet: - li_member_urn = puppet.li_member_urn - else: - continue - if not li_member_urn.prefix: - li_member_urn.prefix = "urn:li:fs_miniProfile" - attributes.append( - Attribute( - mention.offset, - mention.length, - AttributeType(TextEntity(li_member_urn)), - ) - ) - else: - text = content.body - if content.msgtype == MessageType.EMOTE: - display_name = await intent.get_displayname(sender.mxid) - if display_name: - text = f"* {display_name} {text}" - attributes.append( - Attribute( - 2, - len(display_name), - AttributeType(TextEntity(sender.li_member_urn)), - ) - ) - else: - log.warning(f"Couldn't find displayname for {sender.mxid}") - - return MessageCreate(AttributedBody(text, attributes), body=text) diff --git a/linkedin_matrix/get_version.py b/linkedin_matrix/get_version.py deleted file mode 100644 index 5a3a5b8..0000000 --- a/linkedin_matrix/get_version.py +++ /dev/null @@ 
-1,53 +0,0 @@ -from __future__ import annotations - -from typing import Any -import os -import shutil -import subprocess - -from . import __version__ - -cmd_env = { - "PATH": os.environ["PATH"], - "HOME": os.environ["HOME"], - "LANG": "C", - "LC_ALL": "C", -} - - -def run(cmd: list[str]) -> Any: - return subprocess.check_output(cmd, stderr=subprocess.DEVNULL, env=cmd_env) - - -git_revision_url: str | None -if os.path.exists(".git") and shutil.which("git"): - try: - git_revision = run(["git", "rev-parse", "HEAD"]).strip().decode("ascii") - git_revision_url = f"https://github.com/beeper/linkedin/commit/{git_revision}" - git_revision = git_revision[:8] - except (subprocess.SubprocessError, OSError): - git_revision = "unknown" - git_revision_url = None - - try: - git_tag = run(["git", "describe", "--exact-match", "--tags"]).strip().decode("ascii") - except (subprocess.SubprocessError, OSError): - git_tag = None -else: - git_revision = "unknown" - git_revision_url = None - git_tag = None - -git_tag_url = f"https://github.com/beeper/linkedin/releases/tag/{git_tag}" if git_tag else None - -if git_tag and __version__ == git_tag[1:].replace("-", ""): - version = __version__ - linkified_version = f"[{version}]({git_tag_url})" -else: - if not __version__.endswith("+dev"): - __version__ += "+dev" - version = f"{__version__}.{git_revision}" - if git_revision_url: - linkified_version = f"{__version__}.[{git_revision}]({git_revision_url})" - else: - linkified_version = version diff --git a/linkedin_matrix/matrix.py b/linkedin_matrix/matrix.py deleted file mode 100644 index e01a5ab..0000000 --- a/linkedin_matrix/matrix.py +++ /dev/null @@ -1,140 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, cast -import asyncio - -from mautrix.bridge import BaseMatrixHandler -from mautrix.types import ( - Event, - EventID, - EventType, - PresenceEvent, - PresenceEventContent, - ReceiptEvent, - RoomID, - TypingEvent, - UserID, -) -from mautrix.types.event.message 
import RelationType -from mautrix.types.event.reaction import ReactionEventContent - -# these have to be in this particular order to avoid circular imports -from . import portal as po, user as u - -if TYPE_CHECKING: - from .__main__ import LinkedInBridge - - -class MatrixHandler(BaseMatrixHandler): - def __init__(self, bridge: "LinkedInBridge"): - prefix, suffix = bridge.config["bridge.username_template"].format(userid=":").split(":") - homeserver = bridge.config["homeserver.domain"] - self.user_id_prefix = f"@{prefix}" - self.user_id_suffix = f"{suffix}:{homeserver}" - super().__init__(bridge=bridge) - - async def send_welcome_message(self, room_id: RoomID, inviter: "u.User"): - await super().send_welcome_message(room_id, inviter) - if not inviter.notice_room: - inviter.notice_room = room_id - await inviter.save() - await self.az.intent.send_notice( - room_id, - "This room has been marked as your LinkedIn Messages bridge notice room.", - ) - - async def handle_read_receipt(self, user: "u.User", portal: "po.Portal", *_): - if not user.client or not portal.mxid: - return - self.log.debug(f"{user.li_member_urn} read {portal.li_thread_urn}") - await user.client.mark_conversation_as_read(portal.li_thread_urn) - - async def handle_leave(self, room_id: RoomID, user_id: UserID, _): - portal = await po.Portal.get_by_mxid(room_id) - if not portal: - return - - user = await u.User.get_by_mxid(user_id, create=False) - if not user: - return - - await portal.handle_matrix_leave(user) - - @staticmethod - async def handle_redaction( - room_id: RoomID, - user_id: UserID, - event_id: EventID, - redaction_event_id: EventID, - ): - user = await u.User.get_by_mxid(user_id) - if not user: - return - - portal = await po.Portal.get_by_mxid(room_id) - if not portal: - return - - await portal.handle_matrix_redaction(user, event_id, redaction_event_id) - - @classmethod - async def handle_reaction( - cls, - room_id: RoomID, - user_id: UserID, - event_id: EventID, - content: 
ReactionEventContent, - ): - if content.relates_to.rel_type != RelationType.ANNOTATION: - cls.log.debug( - f"Ignoring m.reaction event in {room_id} from {user_id} with " - f"unexpected relation type {content.relates_to.rel_type}" - ) - return - user = await u.User.get_by_mxid(user_id) - if not user: - return - - portal = await po.Portal.get_by_mxid(room_id) - if not portal: - return - - await portal.handle_matrix_reaction( - user, event_id, content.relates_to.event_id, content.relates_to.key - ) - - async def handle_presence(self, user_id: UserID, info: PresenceEventContent): - # TODO (#50) - self.log.info(f"user ({user_id}) is present {info}") - if not self.config["bridge.presence"]: - return - - async def handle_typing(self, room_id: RoomID, typing: list[UserID]): - self.log.info(f"room: {room_id}: typing {typing}") - portal: po.Portal | None = await po.Portal.get_by_mxid(room_id) - if not portal: - return - - async def _send_typing(user_id: UserID): - await portal.handle_matrix_typing(await u.User.get_by_mxid(user_id)) - - await asyncio.gather(*(_send_typing(user_id) for user_id in typing)) - - async def handle_ephemeral_event( - self, - evt: ReceiptEvent | PresenceEvent | TypingEvent, - ): - if evt.type == EventType.PRESENCE: - evt = cast(PresenceEvent, evt) - await self.handle_presence(evt.sender, evt.content) - elif evt.type == EventType.TYPING: - evt = cast(TypingEvent, evt) - await self.handle_typing(evt.room_id, evt.content.user_ids) - elif evt.type == EventType.RECEIPT: - await self.handle_receipt(cast(ReceiptEvent, evt)) - - async def handle_event(self, evt: Event): - if evt.type == EventType.ROOM_REDACTION: - await self.handle_redaction(evt.room_id, evt.sender, evt.redacts, evt.event_id) - elif evt.type == EventType.REACTION: - await self.handle_reaction(evt.room_id, evt.sender, evt.event_id, evt.content) diff --git a/linkedin_matrix/portal.py b/linkedin_matrix/portal.py deleted file mode 100644 index ff1d52c..0000000 --- a/linkedin_matrix/portal.py +++ 
/dev/null @@ -1,1774 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Any, AsyncGenerator, Literal, cast -from collections import deque -from datetime import datetime, timedelta -from io import BytesIO -from itertools import zip_longest -import asyncio - -from bs4 import BeautifulSoup -import magic - -from linkedin_matrix.db.message import Message -from linkedin_messaging import URN -from linkedin_messaging.api_objects import ( - AttributedBody, - Conversation, - ConversationEvent, - Error, - MediaAttachment, - MessageAttachment, - MessageCreate, - MiniProfile, - ReactionSummary, - RealTimeEventStreamEvent, - ThirdPartyMedia, -) -from mautrix.appservice import IntentAPI -from mautrix.bridge import BasePortal, NotificationDisabler, async_getter_lock -from mautrix.errors import MatrixError, MForbidden -from mautrix.types import ( - AudioInfo, - ContentURI, - EncryptedFile, - EventID, - EventType, - FileInfo, - ImageInfo, - MediaMessageEventContent, - Membership, - MessageEventContent, - MessageType, - RoomID, - TextMessageEventContent, - VideoInfo, -) -from mautrix.types.event.message import Format -from mautrix.types.primitive import UserID -from mautrix.util.message_send_checkpoint import MessageSendCheckpointStatus -from mautrix.util.simple_lock import SimpleLock - -from . 
import matrix as m, puppet as p, user as u -from .config import Config -from .db import Message as DBMessage, Portal as DBPortal, Reaction as DBReaction -from .formatter import ( - linkedin_spinmail_to_matrix, - linkedin_subject_to_matrix, - linkedin_to_matrix, - matrix_to_linkedin, -) - -if TYPE_CHECKING: - from .__main__ import LinkedInBridge - -try: - from PIL import Image -except ImportError: - Image = None - -try: - from mautrix.crypto.attachments import decrypt_attachment, encrypt_attachment -except ImportError: - decrypt_attachment = encrypt_attachment = None # type: ignore - - -class FakeLock: - async def __aenter__(self): - pass - - async def __aexit__(self, exc_type: Any, exc: Any, tb: Any): - pass - - -StateBridge = EventType.find("m.bridge", EventType.Class.STATE) -StateHalfShotBridge = EventType.find("uk.half-shot.bridge", EventType.Class.STATE) -MediaInfo = FileInfo | VideoInfo | AudioInfo | ImageInfo -ConvertedMessage = tuple[EventType, MessageEventContent] - - -class Portal(DBPortal, BasePortal): - invite_own_puppet_to_pm: bool = False - by_mxid: dict[RoomID, "Portal"] = {} - by_li_thread_urn: dict[tuple[URN, URN | None], "Portal"] = {} - matrix: m.MatrixHandler - config: Config - private_chat_portal_meta: Literal["default", "always", "never"] - - backfill_lock: SimpleLock - _dedup: deque[URN] - _send_locks: dict[URN, asyncio.Lock] - _noop_lock: FakeLock = FakeLock() - - def __init__( - self, - li_thread_urn: URN, - li_receiver_urn: URN | None, - li_is_group_chat: bool, - li_other_user_urn: URN | None = None, - mxid: RoomID | None = None, - name: str | None = None, - photo_id: str | None = None, - avatar_url: ContentURI | None = None, - topic: str | None = None, - name_set: bool = False, - avatar_set: bool = False, - topic_set: bool = False, - encrypted: bool = False, - ): - super().__init__( - li_thread_urn, - li_receiver_urn, - li_is_group_chat, - li_other_user_urn, - mxid, - encrypted, - name, - photo_id, - avatar_url, - topic, - name_set, - 
avatar_set, - topic_set, - ) - self.log = self.log.getChild(self.li_urn_log) - - self._main_intent = None - self._create_room_lock = asyncio.Lock() - self._dedup = deque(maxlen=100) - self._send_locks = {} - self._typing = set() - - self.backfill_lock = SimpleLock( - "Waiting for backfilling to finish before handling %s", log=self.log - ) - self._backfill_leave: set[IntentAPI] | None = None - - @classmethod - def init_cls(cls, bridge: "LinkedInBridge"): - BasePortal.bridge = bridge - cls.az = bridge.az - cls.config = bridge.config - cls.loop = bridge.loop - cls.matrix = bridge.matrix - cls.invite_own_puppet_to_pm = cls.config["bridge.invite_own_puppet_to_pm"] - cls.private_chat_portal_meta = cls.config["bridge.private_chat_portal_meta"] - NotificationDisabler.puppet_cls = p.Puppet - NotificationDisabler.config_enabled = cls.config["bridge.backfill.disable_notifications"] - - # region DB conversion - - async def delete(self): - if self.mxid: - await DBMessage.delete_all_by_room(self.mxid) - self.by_mxid.pop(self.mxid, None) - self.by_li_thread_urn.pop(self.li_urn_full, None) - await super().delete() - - # endregion - - # region Send lock handling - - def require_send_lock(self, li_member_urn: URN) -> asyncio.Lock: - try: - lock = self._send_locks[li_member_urn] - except KeyError: - lock = asyncio.Lock() - self._send_locks[li_member_urn] = lock - return lock - - def optional_send_lock(self, li_member_urn: URN) -> asyncio.Lock | FakeLock: - try: - return self._send_locks[li_member_urn] - except KeyError: - pass - return self._noop_lock - - # endregion - - # region Properties - - @property - def li_urn_full(self) -> tuple[URN, URN | None]: - return self.li_thread_urn, self.li_receiver_urn - - @property - def main_intent(self) -> IntentAPI: - if not self._main_intent: - raise ValueError("Portal must be postinit()ed before main_intent can be used") - return self._main_intent - - @property - def set_dm_room_metadata(self) -> bool: - return ( - not self.is_direct - or 
self.private_chat_portal_meta == "always" - or (self.encrypted and self.private_chat_portal_meta != "never") - ) - - @property - def is_direct(self) -> bool: - return not self.li_is_group_chat - - @property - def li_urn_log(self) -> str: - if self.is_direct: - return f"{self.li_thread_urn}<->{self.li_receiver_urn}" - return str(self.li_thread_urn) - - # endregion - - # region Database getters - - async def postinit(self): - self.by_li_thread_urn[self.li_urn_full] = self - if self.mxid: - self.by_mxid[self.mxid] = self - - if self.is_direct: - if not self.li_other_user_urn: - raise ValueError("Portal.li_other_user_urn not set for private chat") - self._main_intent = ( - await p.Puppet.get_by_li_member_urn(self.li_other_user_urn) - ).default_mxid_intent - else: - self._main_intent = self.az.intent - - @classmethod - @async_getter_lock - async def get_by_mxid(cls, mxid: RoomID) -> Portal | None: - try: - return cls.by_mxid[mxid] - except KeyError: - pass - - portal = cast("Portal", await super().get_by_mxid(mxid)) - if portal: - await portal.postinit() - return portal - - return None - - @classmethod - @async_getter_lock - async def get_by_li_thread_urn( - cls, - li_thread_urn: URN, - *, - li_receiver_urn: URN | None = None, - li_is_group_chat: bool = False, - li_other_user_urn: URN | None = None, - create: bool = True, - ) -> Portal | None: - try: - return cls.by_li_thread_urn[(li_thread_urn, li_receiver_urn)] - except KeyError: - pass - - portal = cast( - Portal, - await super().get_by_li_thread_urn(li_thread_urn, li_receiver_urn), - ) - if not portal: - if create: - portal = cls( - li_thread_urn, - li_receiver_urn=li_receiver_urn, - li_is_group_chat=li_is_group_chat, - li_other_user_urn=li_other_user_urn, - ) - await portal.insert() - else: - return None - - if li_other_user_urn is not None: - portal.li_other_user_urn = li_other_user_urn - await portal.save() - await portal.postinit() - return portal - - @classmethod - async def get_all_by_li_receiver_urn( - cls, - 
li_receiver_urn: URN, - ) -> AsyncGenerator["Portal", None]: - portals = await super().get_all_by_li_receiver_urn(li_receiver_urn) - for portal in portals: - portal = cast(Portal, portal) - try: - yield cls.by_li_thread_urn[(portal.li_thread_urn, portal.li_receiver_urn)] - except KeyError: - await portal.postinit() - yield portal - - @classmethod - async def all(cls) -> AsyncGenerator["Portal", None]: - portals = await super().all() - for portal in cast(list[Portal], portals): - try: - yield cls.by_li_thread_urn[(portal.li_thread_urn, portal.li_receiver_urn)] - except KeyError: - await portal.postinit() - yield portal - - async def get_dm_puppet(self) -> p.Puppet | None: - if not self.is_direct: - return None - return await p.Puppet.get_by_li_member_urn(self.li_other_user_urn) - - # endregion - - # region Chat info updating - - async def update_info( - self, - source: u.User | None = None, - conversation: Conversation | None = None, - ): - if not conversation: - # shouldn't happen currently - assert False, "update_info called without conversation" - - if conversation.entity_urn != self.li_thread_urn: - self.log.warning( - f"Got different ID ({conversation.entity_urn}) than what was asked " - f"for ({self.li_thread_urn}) when fetching" - ) - - changed = False - if self.is_direct: - if ( - len(conversation.participants) - and (mm := conversation.participants[0].messaging_member) - and (mp := mm.mini_profile) - ): - changed = await self._update_topic(mp) or changed - else: - changed = await self._update_name(conversation.name) or changed - - changed = await self._update_participants(source, conversation) or changed - if changed: - await self.update_bridge_info() - await self.save() - - async def _update_name(self, name: str) -> bool: - if not name: - self.log.warning("Got empty name in _update_name call") - return False - if self.name != name or (not self.name_set and self.set_dm_room_metadata): - self.log.trace("Updating name %s -> %s", self.name, name) - self.name = 
name - self.name_set = False - if self.mxid and self.set_dm_room_metadata: - try: - await self.main_intent.set_room_name(self.mxid, self.name) - self.name_set = True - except Exception: - self.log.exception("Failed to set room name") - self.name_set = False - return True - return False - - async def _update_photo_from_puppet(self, puppet: "p.Puppet") -> bool: - if self.photo_id == puppet.photo_id and (self.avatar_set or not self.set_dm_room_metadata): - return False - self.avatar_set = False - self.photo_id = puppet.photo_id - if puppet.photo_mxc: - self.avatar_url = puppet.photo_mxc - elif self.photo_id: - profile = await self.main_intent.get_profile(puppet.default_mxid) - self.avatar_url = profile.avatar_url - puppet.photo_mxc = profile.avatar_url - else: - self.avatar_url = ContentURI("") - if self.mxid and self.set_dm_room_metadata: - try: - await self.main_intent.set_room_avatar(self.mxid, self.avatar_url) - self.avatar_set = True - except Exception: - self.log.exception("Failed to set room avatar") - return True - - async def _update_topic(self, mini_profile: MiniProfile) -> bool: - if not self.config["bridge.set_topic_on_dms"]: - return False - - topic_parts = [ - part - for part in [ - mini_profile.occupation, - ( - f"https://www.linkedin.com/in/{mini_profile.public_identifier}" - if ( - mini_profile.public_identifier - and mini_profile.public_identifier != "UNKNOWN" - ) - else None - ), - ] - if part - ] - topic = " | ".join(topic_parts) if len(topic_parts) else None - if topic == self.topic and self.topic_set: - return False - self.topic = topic - - if not self.topic and not self.topic_set: - self.topic_set = True - return False - - if self.mxid: - try: - await self.main_intent.set_room_topic(self.mxid, self.topic or "") - self.topic_set = True - except Exception: - self.log.exception("Failed to set room topic") - self.topic_set = False - - return True - - async def update_bridge_info(self): - if not self.mxid: - self.log.debug("Not updating bridge info: 
no Matrix room created") - return - try: - self.log.debug("Updating bridge info...") - await self.main_intent.send_state_event( - self.mxid, - StateBridge, - self.bridge_info, - self.bridge_info_state_key, - ) - # TODO (#52) remove this once - # https://github.com/matrix-org/matrix-doc/pull/2346 is in spec - await self.main_intent.send_state_event( - self.mxid, - StateHalfShotBridge, - self.bridge_info, - self.bridge_info_state_key, - ) - except Exception: - self.log.warning("Failed to update bridge info", exc_info=True) - - async def _update_participants( - self, - source: "u.User", - conversation: Conversation | None = None, - ) -> bool: - changed = False - - for participant in conversation.participants if conversation else []: - if ( - not (mm := participant.messaging_member) - or not (mp := mm.mini_profile) - or not (entity_urn := mp.entity_urn) - ): - self.log.error(f"No entity_urn on participant! {participant}") - continue - participant_urn = entity_urn - if participant_urn == URN("UNKNOWN"): - participant_urn = conversation.entity_urn - puppet = await p.Puppet.get_by_li_member_urn(participant_urn) - await puppet.update_info(source, participant.messaging_member) - if self.is_direct and self.li_other_user_urn == puppet.li_member_urn: - changed = await self._update_name(puppet.name) or changed - changed = await self._update_photo_from_puppet(puppet) or changed - - if self.mxid: - if puppet.li_member_urn != self.li_receiver_urn or puppet.is_real_user: - await puppet.intent_for(self).ensure_joined(self.mxid, bot=self.main_intent) - - if source.space_mxid: - try: - await self.az.intent.invite_user( - source.space_mxid, puppet.custom_mxid or puppet.mxid - ) - await puppet.intent.join_room_by_id(source.space_mxid) - except Exception as e: - self.log.warning( - f"Failed to invite and join puppet {puppet.li_member_urn} to " - f"space {source.space_mxid}: {e}" - ) - - return changed - - # endregion - - # region Matrix room creation - - async def create_matrix_room( - 
self, - source: "u.User", - conversation: Conversation | None = None, - ) -> RoomID | None: - if self.mxid: - try: - await self._update_matrix_room(source, conversation) - except Exception: - self.log.exception("Failed to update portal") - return self.mxid - - async with self._create_room_lock: - try: - return await self._create_matrix_room(source, conversation) - except Exception: - self.log.exception("Failed to create portal") - return None - - async def update_matrix_room( - self, - source: "u.User", - conversation: Conversation | None = None, - ): - try: - await self._update_matrix_room(source, conversation) - except Exception: - self.log.exception("Failed to update portal") - - def _get_invite_content(self, double_puppet: p.Puppet | None) -> dict[str, Any]: - invite_content = {} - if double_puppet: - invite_content["fi.mau.will_auto_accept"] = True - if self.is_direct: - invite_content["is_direct"] = True - return invite_content - - async def _create_matrix_room( - self, - source: "u.User", - conversation: Conversation | None = None, - ) -> RoomID | None: - if self.mxid: - await self._update_matrix_room(source, conversation) - return self.mxid - - # Update info before computing initial state because self.bridge_info depends on - # things that are set by this function. 
- await self.update_info(source, conversation) - - self.log.debug("Creating Matrix room") - name: str | None = None - initial_state = [ - { - "type": str(StateBridge), - "state_key": self.bridge_info_state_key, - "content": self.bridge_info, - }, - { - # TODO (#52) remove this once - # https://github.com/matrix-org/matrix-doc/pull/2346 is in spec - "type": str(StateHalfShotBridge), - "state_key": self.bridge_info_state_key, - "content": self.bridge_info, - }, - ] - invites = [] - if self.config["bridge.encryption.default"] and self.matrix.e2ee: - self.encrypted = True - initial_state.append( - { - "type": "m.room.encryption", - "content": self.get_encryption_state_event_json(), - } - ) - if self.is_direct: - invites.append(self.az.bot_mxid) - - if self.topic: - initial_state.append( - { - "type": str(EventType.ROOM_TOPIC), - "content": {"topic": self.topic}, - } - ) - - # if not info: - # self.log.debug( - # "update_info() didn't return info, cancelling room creation") - # return None - - if self.set_dm_room_metadata: - name = self.name - initial_state.append( - { - "type": str(EventType.ROOM_AVATAR), - "content": {"url": self.avatar_url}, - } - ) - - # We lock backfill lock here so any messages that come between the room being - # created and the initial backfill finishing wouldn't be bridged before the - # backfill messages. 
- with self.backfill_lock: - creation_content = {} - if not self.config["bridge.federate_rooms"]: - creation_content["m.federate"] = False - - self.mxid = await self.main_intent.create_room( - name=name, - is_direct=self.is_direct, - initial_state=initial_state, - invitees=invites, - creation_content=creation_content, - ) - if not self.mxid: - raise Exception("Failed to create room: no mxid returned") - self.name_set = bool(name) - self.avatar_set = bool(self.avatar_url) and self.set_dm_room_metadata - - if self.encrypted and self.matrix.e2ee and self.is_direct: - try: - await self.az.intent.ensure_joined(self.mxid) - except Exception: - self.log.warning(f"Failed to add bridge bot to new private chat {self.mxid}") - - if source.space_mxid: - try: - await self.az.intent.send_state_event( - source.space_mxid, - EventType.SPACE_CHILD, - {"via": [self.config["homeserver.domain"]], "suggested": True}, - state_key=str(self.mxid), - ) - await self.az.intent.invite_user(source.space_mxid, source.mxid) - except Exception: - self.log.warning(f"Failed to add chat {self.mxid} to user's space") - - await self.save() - self.log.debug(f"Matrix room created: {self.mxid}") - self.by_mxid[self.mxid] = self - - puppet = await p.Puppet.get_by_custom_mxid(source.mxid) - await self.main_intent.invite_user( - self.mxid, source.mxid, extra_content=self._get_invite_content(puppet) - ) - - if puppet: - try: - if self.is_direct: - await source.update_direct_chats({self.main_intent.mxid: [self.mxid]}) - await puppet.intent.join_room_by_id(self.mxid) - except MatrixError: - self.log.debug( - "Failed to join custom puppet into newly created portal", - exc_info=True, - ) - - if self.is_direct: - # Check if this is an ad. If it is, then set the room permissions such - # that the user is not allowed to send messages. (If they do send a - # message, the bridge would give an error since responding to such - # messages is not allowed.) 
- if ( - (mm := conversation.participants[0].messaging_member) - and (mp := mm.mini_profile) - and (mp.entity_urn == URN("UNKNOWN")) - ): - await self._disable_responding() - - puppet = await p.Puppet.get_by_custom_mxid(source.mxid) - if puppet: - try: - did_join = await puppet.intent.join_room_by_id(self.mxid) - if did_join: - await source.update_direct_chats({self.main_intent.mxid: [self.mxid]}) - if source.space_mxid: - await self.az.intent.invite_user(source.space_mxid, puppet.custom_mxid) - await puppet.intent.join_room_by_id(source.space_mxid) - except MatrixError: - self.log.debug( - "Failed to join custom puppet into newly created portal", - exc_info=True, - ) - - await self._update_participants(source, conversation) - - try: - await self.backfill(source, conversation, is_initial=True) - except Exception: - self.log.exception("Failed to backfill new portal") - - # await self._sync_read_receipts(info.read_receipts.nodes) - - return self.mxid - - async def _update_matrix_room( - self, - source: "u.User", - conversation: Conversation | None = None, - ): - puppet = await p.Puppet.get_by_custom_mxid(source.mxid) - await self.main_intent.invite_user( - self.mxid, - source.mxid, - check_cache=False, - extra_content=self._get_invite_content(puppet), - ) - if puppet and puppet.is_real_user: - await puppet.intent.ensure_joined(self.mxid) - - if source.space_mxid and self.mxid: - await self.az.intent.send_state_event( - source.space_mxid, - EventType.SPACE_CHILD, - {"via": [self.config["homeserver.domain"]], "suggested": True}, - state_key=str(self.mxid), - ) - await self.update_info(source, conversation) - - @property - def bridge_info_state_key(self) -> str: - return f"com.github.linkedin://linkedin/{self.li_thread_urn.id_str()}" - - @property - def bridge_info(self) -> dict[str, Any]: - return { - "bridgebot": self.az.bot_mxid, - "creator": self.main_intent.mxid, - "protocol": { - "id": "linkedin", - "displayname": "LinkedIn Messages", - "avatar_url": 
self.config["appservice.bot_avatar"], - }, - "channel": { - "id": self.li_thread_urn.id_str(), - "displayname": self.name, - "avatar_url": self.avatar_url, - }, - } - - # endregion - - # region Event backfill - - async def backfill( - self, - source: "u.User", - conversation: Conversation | None, - is_initial: bool, - ): - assert self.li_receiver_urn - limit: int | None = ( - self.config["bridge.backfill.initial_limit"] - if is_initial - else self.config["bridge.backfill.missed_limit"] - ) - if limit == 0: - return - elif limit and limit < 0: - limit = None - last_active = None - - if not is_initial and conversation and len(conversation.events) > 0: - last_active = conversation.events[-1].created_at - - most_recent = await DBMessage.get_most_recent(self.li_thread_urn, self.li_receiver_urn) - if most_recent and is_initial: - self.log.debug(f"Not backfilling {self.li_urn_log}: already bridged messages found") - elif (not most_recent or not most_recent.timestamp) and not is_initial: - self.log.debug(f"Not backfilling {self.li_urn_log}: no most recent message found") - elif last_active and most_recent and most_recent.timestamp >= last_active: - self.log.debug( - f"Not backfilling {self.li_urn_log}: last activity is equal to most " - f"recent bridged message ({most_recent.timestamp} >= {last_active})" - ) - elif conversation: - with self.backfill_lock: - await self._backfill( - source, - limit, - most_recent.timestamp if most_recent else None, - conversation=conversation, - ) - - if ( - conversation.unread_count == 0 - or ( - (hours := self.config["bridge.backfill.unread_hours_threshold"]) > 0 - and conversation.last_activity_at - and conversation.last_activity_at < datetime.now() - timedelta(hours=hours) - ) - ) and ( - most_recent := await DBMessage.get_most_recent( - self.li_thread_urn, self.li_receiver_urn - ) - ): - puppet = await source.bridge.get_double_puppet(source.mxid) - if puppet and puppet.is_real_user: - if most_recent_reaction := await 
DBReaction.get_most_recent_by_li_message_urn( - self.mxid, most_recent.li_message_urn - ): - await puppet.intent.mark_read(self.mxid, most_recent_reaction.mxid) - else: - await puppet.intent.mark_read(self.mxid, most_recent.mxid) - - async def _backfill( - self, - source: "u.User", - limit: int | None, - after_timestamp: datetime | None, - conversation: Conversation, - ): - assert self.mxid - assert conversation.entity_urn - assert source.client, f"No client found for {source.mxid}!" - self.log.debug(f"Backfilling history through {source.mxid}") - messages = conversation.events - - if len(messages): - oldest_message = messages[0] - before_timestamp = oldest_message.created_at - else: - before_timestamp = datetime.now() - - self.log.debug(f"Fetching up to {limit} messages through {source.li_member_urn}") - - while limit is None or len(messages) < limit: - result = await source.client.get_conversation( - conversation.entity_urn, - created_before=before_timestamp, - ) - elements = result.elements - messages = elements + messages - - if len(elements) < 20: - break - - if ( - len(elements) - and elements[0].created_at - and after_timestamp - and (created_at := elements[0].created_at) <= after_timestamp - ): - self.log.debug( - f"Stopping fetching messages at {created_at} as message is older " - f"than newest bridged message ({created_at} < {after_timestamp})", - ) - break - - before_timestamp = messages[0].created_at - - if after_timestamp: - try: - slice_index = next( - index - for index, message in enumerate(messages) - if message.created_at and message.created_at > after_timestamp - ) - messages = messages[slice_index:] - except StopIteration: - messages = [] - - if limit and len(messages) > limit: - messages = messages[-limit:] - - self._backfill_leave = set() - async with NotificationDisabler(self.mxid, source): - for message in messages: - if ( - not (f := message.from_) - or not (mm := f.messaging_member) - or not (mp := mm.mini_profile) - or not (entity_urn := 
mp.entity_urn) - ): - self.log.error("No entity_urn found on message mini_profile!", message) - continue - member_urn = entity_urn - if member_urn == URN("UNKNOWN"): - member_urn = conversation.entity_urn - puppet = await p.Puppet.get_by_li_member_urn(member_urn) - await self.handle_linkedin_message(source, puppet, message) - for intent in self._backfill_leave: - self.log.trace(f"Leaving room with {intent.mxid} post-backfill") - await intent.leave_room(self.mxid) - self.log.info(f"Backfilled {len(messages)} messages through {source.mxid}") - - # endregion - - # region Matrix event handling - - async def _send_delivery_receipt(self, event_id: EventID): - if event_id and self.config["bridge.delivery_receipts"] and self.mxid: - try: - await self.az.intent.mark_read(self.mxid, event_id) - except Exception: - self.log.exception(f"Failed to send delivery receipt for {event_id}") - - async def _send_bridge_error(self, msg: str, certain_failure: bool = False): - certainty = "was not" if certain_failure else "may not have been" - await self._send_message( - self.main_intent, - TextMessageEventContent( - msgtype=MessageType.NOTICE, - body=f"\u26a0 Your message {certainty} bridged: {msg}", - ), - ) - - async def handle_matrix_leave(self, user: "u.User"): - if self.is_direct: - self.log.info(f"{user.mxid} left private chat portal with {self.li_other_user_urn}") - if user.li_member_urn == self.li_receiver_urn: - self.log.info( - f"{user.mxid} was the recipient of this portal. " "Cleaning up and deleting..." 
- ) - await self.cleanup_and_delete() - - if user.space_mxid: - await self.az.intent.send_state_event( - user.space_mxid, - EventType.SPACE_CHILD, - {}, - state_key=str(self.mxid), - ) - else: - self.log.debug(f"{user.mxid} left portal to {self.li_other_user_urn}") - - async def handle_matrix_message( - self, - sender: "u.User", - message: MessageEventContent, - event_id: EventID, - ): - assert self.mxid - - exception: Exception | None = None - status = MessageSendCheckpointStatus.PERM_FAILURE - try: - await self._handle_matrix_message(sender, message, event_id) - except NotImplementedError as e: - self.log.exception(f"Got NotImplementedError while handling {event_id}") - await self._send_bridge_error(f"Event is unsupported: {e}", certain_failure=True) - exception = e - status = MessageSendCheckpointStatus.UNSUPPORTED - except Error as e: - self.log.exception(f"Failed handling {event_id}: {e.to_json()}") - await self._send_bridge_error(e.to_json()) - exception = e - except Exception as e: - self.log.exception(f"Failed handling {event_id}") - await self._send_bridge_error(str(e)) - exception = e - - if exception: - sender.send_remote_checkpoint( - status, - event_id, - self.mxid, - EventType.ROOM_MESSAGE, - message.msgtype, - error=exception, - ) - - async def _handle_matrix_message( - self, - sender: "u.User", - message: MessageEventContent, - event_id: EventID, - ): - if message.msgtype.is_text: - await self._handle_matrix_text( - event_id, - sender, - cast(TextMessageEventContent, message), - ) - elif message.msgtype.is_media: - await self._handle_matrix_media( - event_id, - sender, - cast(MediaMessageEventContent, message), - ) - else: - raise NotImplementedError(f"Messages of type {message.msgtype} are not supported.") - - async def _send_linkedin_message( - self, - event_id: EventID, - sender: "u.User", - message_create: MessageCreate, - message_type: MessageType, - ) -> DBMessage: - assert self.mxid - assert self.li_receiver_urn - assert sender.client - 
assert sender.li_member_urn - - async with self.require_send_lock(sender.li_member_urn): - resp = await sender.client.send_message(self.li_thread_urn, message_create) - if not resp.value or not resp.value.event_urn: - raise Exception("Response value was None.") - - sender.send_remote_checkpoint( - MessageSendCheckpointStatus.SUCCESS, - event_id, - self.mxid, - EventType.ROOM_MESSAGE, - message_type, - ) - message = DBMessage( - mxid=event_id, - mx_room=self.mxid, - li_message_urn=resp.value.event_urn, - li_thread_urn=self.li_thread_urn, - li_sender_urn=sender.li_member_urn, - li_receiver_urn=self.li_receiver_urn, - index=0, - timestamp=datetime.now(), - ) - await self._send_delivery_receipt(event_id) - self._dedup.append(resp.value.event_urn) - await message.insert() - return message - - async def _handle_matrix_text( - self, - event_id: EventID, - sender: "u.User", - message: TextMessageEventContent, - ): - assert sender.client - message_create = await matrix_to_linkedin(message, sender, self.main_intent, self.log) - await self._send_linkedin_message( - event_id, - sender, - message_create, - message.msgtype, - ) - - async def _handle_matrix_media( - self, - event_id: EventID, - sender: "u.User", - message: MediaMessageEventContent, - ): - assert sender.client - if not message.info: - return - - if message.file and message.file.url and decrypt_attachment: - data = await self.main_intent.download_media(message.file.url) - file_hash = message.file.hashes.get("sha256") - if file_hash: - data = decrypt_attachment( - data, - message.file.key.key, - file_hash, - message.file.iv, - ) - else: - return - elif message.url: - data = await self.main_intent.download_media(message.url) - else: - return - - attachment = await sender.client.upload_media(data, message.body, message.info.mimetype) - attachment.media_type = attachment.media_type or "" - await self._send_linkedin_message( - event_id, - sender, - MessageCreate(AttributedBody(), attachments=[attachment]), - 
message.msgtype, - ) - - async def handle_matrix_redaction( - self, - sender: "u.User", - event_id: EventID, - redaction_event_id: EventID, - ): - try: - await self._handle_matrix_redaction(sender, event_id) - except Exception as e: - sender.send_remote_checkpoint( - MessageSendCheckpointStatus.PERM_FAILURE, - redaction_event_id, - self.mxid, - EventType.ROOM_REDACTION, - error=e, - ) - else: - sender.send_remote_checkpoint( - MessageSendCheckpointStatus.SUCCESS, - redaction_event_id, - self.mxid, - EventType.ROOM_REDACTION, - ) - await self._send_delivery_receipt(redaction_event_id) - - async def _handle_matrix_redaction(self, sender: "u.User", event_id: EventID): - if not self.mxid or not sender.client or not sender.li_member_urn: - return - - message = await DBMessage.get_by_mxid(event_id, self.mxid) - if message: - self.log.info(f"Deleting {message.li_message_urn} in {self.li_thread_urn}") - await message.delete() - await sender.client.delete_message(self.li_thread_urn, message.li_message_urn) - return - - reaction = await DBReaction.get_by_mxid(event_id, self.mxid) - if reaction: - self.log.info( - f"Deleting reaction {reaction.reaction} from {reaction.li_message_urn}" - f" in {self.li_thread_urn}" - ) - await reaction.delete() - await sender.client.remove_emoji_reaction( - self.li_thread_urn, reaction.li_message_urn, emoji=reaction.reaction - ) - return - - raise Exception("No message or reaction found for redaction") - - async def handle_matrix_reaction( - self, - sender: "u.User", - event_id: EventID, - reacting_to: EventID, - reaction: str, - ): - if not sender.li_member_urn or not self.mxid or not sender.client: - return - async with self.require_send_lock(sender.li_member_urn): - message = await DBMessage.get_by_mxid(reacting_to, self.mxid) - if not message: - self.log.debug(f"Ignoring reaction to unknown event {reacting_to}") - return - - try: - await sender.client.add_emoji_reaction( - self.li_thread_urn, message.li_message_urn, reaction - ) - except 
Exception: - self.log.exception("Failed to send emoji reaction") - raise - else: - sender.send_remote_checkpoint( - MessageSendCheckpointStatus.SUCCESS, - event_id, - self.mxid, - EventType.REACTION, - ) - await DBReaction( - mxid=event_id, - mx_room=message.mx_room, - li_message_urn=message.li_message_urn, - li_receiver_urn=self.li_receiver_urn, - li_sender_urn=sender.li_member_urn, - reaction=reaction, - ).insert() - await self._send_delivery_receipt(event_id) - - async def handle_matrix_typing(self, source: "u.User"): - await source.client.set_typing(self.li_thread_urn) - - # endregion - - # region LinkedIn event handling - - async def _bridge_own_message_pm( - self, - source: "u.User", - sender: "p.Puppet", - mid: str, - invite: bool = True, - ) -> bool: - assert self.mxid - if ( - self.is_direct - and sender.li_member_urn == source.li_member_urn - and not sender.is_real_user - ): - if self.invite_own_puppet_to_pm and invite: - await self.main_intent.invite_user(self.mxid, UserID(sender.mxid)) - elif ( - await self.az.state_store.get_membership(self.mxid, UserID(sender.mxid)) - != Membership.JOIN - ): - self.log.warning( - f"Ignoring own {mid} in private chat because own puppet is not in" " room." 
- ) - return False - return True - - async def handle_linkedin_message( - self, source: "u.User", sender: "p.Puppet", message: ConversationEvent - ): - try: - if message.subtype == "CONVERSATION_UPDATE": - if ( - (ec := message.event_content) - and (me := ec.message_event) - and (cc := me.custom_content) - and (nu := cc.conversation_name_update_content) - ): - await self._update_name(nu.new_name) - elif (ec := message.event_content) and (me := ec.message_event) and me.recalled_at: - await self._handle_linkedin_message_deletion(sender, message) - elif (ec := message.event_content) and (me := ec.message_event) and me.last_edited_at: - await self._handle_linkedin_message_edit(source, sender, message) - else: - await self._handle_linkedin_message(source, sender, message) - except Exception as e: - self.log.exception(f"Error handling LinkedIn message {message.entity_urn}: {e}") - - async def _disable_responding(self, message: str | None = None): - levels = await self.main_intent.get_power_levels(self.mxid) - if levels.get_user_level(self.main_intent.mxid) == 100: - levels.events_default = 50 - await self.main_intent.set_power_levels(self.mxid, levels) - if message: - await self._send_message( - self.main_intent, - TextMessageEventContent(msgtype=MessageType.NOTICE, body=message), - ) - - async def _convert_linkedin_message( - self, source: "u.User", intent: IntentAPI, message: ConversationEvent - ) -> list[ConvertedMessage]: - if not message.event_content or not message.event_content.message_event: - return [] - message_event = message.event_content.message_event - - converted: list[ConvertedMessage] = [] - - # Handle subject - if message_event.subject: - content = linkedin_subject_to_matrix(message_event.subject) - converted.append((EventType.ROOM_MESSAGE, content)) - - # Handle attachments - converted.extend( - await self._convert_linkedin_media_attachments( - source, intent, message_event.media_attachments - ) - ) - converted.extend( - await 
self._convert_linkedin_attachments(source, intent, message_event.attachments) - ) - - # Handle custom content - if cc := message_event.custom_content: - if cc.third_party_media: - converted.extend( - await self._convert_linkedin_third_party_media( - source, - intent, - cc.third_party_media, - ) - ) - - # Handle InMail message text - if cc.sp_inmail_content: - content = await linkedin_spinmail_to_matrix(cc.sp_inmail_content) - converted.append((EventType.ROOM_MESSAGE, content)) - await self._disable_responding() - - # Handle the normal message text itself - if message_event.attributed_body and message_event.attributed_body.text: - if message.subtype == "SPONSORED_MESSAGE": - content = TextMessageEventContent( - msgtype=MessageType.TEXT, - body=BeautifulSoup(message_event.attributed_body.text).text, - format=Format.HTML, - formatted_body=message_event.attributed_body.text, - ) - else: - content = await linkedin_to_matrix(message_event.attributed_body) - converted.append((EventType.ROOM_MESSAGE, content)) - if message.subtype == "SPONSORED_MESSAGE": - await self._disable_responding("Open the LinkedIn app to respond to this message") - - # Handle shared posts - if f := message_event.feed_update: - plaintext_content = "Feed update shared:\n" - html_content = "Feed update shared:
" - if (c := f.commentary) and (ct := c.text) and (text := ct.text): - plaintext_content += text + "\n" - html_content += text + "
" - - if ( - (c := f.content) - and (ac := c.article_component) - and (nc := ac.navigation_context) - and (target := nc.action_target) - ): - plaintext_content += target - html_content += f'{target}' - - content = TextMessageEventContent( - msgtype=MessageType.TEXT, - body=plaintext_content, - format=Format.HTML, - formatted_body=html_content, - ) - converted.append((EventType.ROOM_MESSAGE, content)) - - return converted - - async def _handle_linkedin_message( - self, source: "u.User", sender: "p.Puppet", message: ConversationEvent - ): - assert self.mxid - assert self.li_receiver_urn - assert message.entity_urn - li_message_urn = message.entity_urn - - # Check in-memory queue for duplicates - message_exists = False - event_ids: list[EventID] = [] - async with self.require_send_lock(sender.li_member_urn): - if li_message_urn in self._dedup: - self.log.trace(f"Not handling message {li_message_urn}, found ID in dedup queue") - # Return here, because it is in the process of being handled. - return - self._dedup.appendleft(li_message_urn) - - # Check database for duplicates - dbm = await DBMessage.get_all_by_li_message_urn(li_message_urn, self.li_receiver_urn) - if len(dbm) > 0: - self.log.debug( - f"Not handling message {li_message_urn}, found duplicate in database." - ) - # Don't return here because we may need to update the reactions. 
- message_exists = True - event_ids = [dbm.mxid for dbm in sorted(dbm, key=lambda m: m.index)] - - intent = sender.intent_for(self) - if not message_exists: - self.log.trace("LinkedIn event content: %s", message) - if not self.mxid: - mxid = await self.create_matrix_room(source) - if not mxid: - # Failed to create - return - if not await self._bridge_own_message_pm(source, sender, f"message {li_message_urn}"): - return - - if ( - self._backfill_leave is not None - and self.li_other_user_urn != sender.li_member_urn - and intent != sender.intent - and intent not in self._backfill_leave - ): - self.log.debug("Adding %s's default puppet to room for backfilling", sender.mxid) - await self.main_intent.invite_user(self.mxid, intent.mxid) - await intent.ensure_joined(self.mxid) - self._backfill_leave.add(intent) - - timestamp = message.created_at or datetime.now() - event_ids = [] - for event_type, content in await self._convert_linkedin_message( - source, intent, message - ): - event_ids.append( - await self._send_message( - intent, content, event_type=event_type, timestamp=timestamp - ) - ) - event_ids = [event_id for event_id in event_ids if event_id] - if not event_ids: - self.log.warning(f"Unhandled LinkedIn message {message.entity_urn}") - return - - # Save all of the messages in the database. 
- self.log.debug(f"Handled LinkedIn message {li_message_urn} -> {event_ids}") - await DBMessage.bulk_create( - li_message_urn=li_message_urn, - li_thread_urn=self.li_thread_urn, - li_sender_urn=sender.li_member_urn, - li_receiver_urn=self.li_receiver_urn, - mx_room=self.mxid, - timestamp=timestamp, - event_ids=event_ids, - ) - await self._send_delivery_receipt(event_ids[-1]) - # end if message_exists - - # Handle reactions - reaction_event_id = event_ids[-1] # react to the last event - for reaction_summary in message.reaction_summaries: - await self._handle_reaction_summary( - li_message_urn, - source, - reaction_event_id, - reaction_summary, - message.created_at, - ) - - async def _redact_and_delete_message( - self, sender: "p.Puppet", msg: Message, timestamp: datetime | None - ): - try: - await sender.intent_for(self).redact(msg.mx_room, msg.mxid, timestamp=timestamp) - except MForbidden: - await self.main_intent.redact(msg.mx_room, msg.mxid, timestamp=timestamp) - await msg.delete() - - async def _handle_linkedin_message_deletion( - self, - sender: "p.Puppet", - message: ConversationEvent, - ): - if not self.mxid or not self.li_receiver_urn: - return - assert message.entity_urn - for db_message in await DBMessage.get_all_by_li_message_urn( - message.entity_urn, self.li_receiver_urn - ): - await self._redact_and_delete_message(sender, db_message, message.created_at) - - async def _handle_linkedin_message_edit( - self, - source: "u.User", - sender: "p.Puppet", - message: ConversationEvent, - ): - if not self.mxid or not self.li_receiver_urn: - return - assert message.entity_urn - assert message.event_content - assert message.event_content.message_event - intent = sender.intent_for(self) - converted = await self._convert_linkedin_message(source, intent, message) - timestamp = message.event_content.message_event.last_edited_at or datetime.now() - - messages = await DBMessage.get_all_by_li_message_urn( - message.entity_urn, - self.li_receiver_urn, - ) - - event_ids = 
[] - for old_message, new_message in zip_longest(messages, converted): - if not new_message: - # There are extra old messages, delete them. - await self._redact_and_delete_message(sender, old_message, timestamp) - continue - - new_event_type, content = new_message - if old_message: - content.set_edit(old_message.mxid) - event_id = await self._send_message( - intent, - content, - event_type=new_event_type, - timestamp=timestamp, - ) - if not old_message: - # If this event is new, we need to save it to the database. - event_ids.append(event_id) - event_ids = [event_id for event_id in event_ids if event_id] - if not event_ids: - self.log.warning(f"Unhandled LinkedIn message edit {message.entity_urn}") - return - - # Save all of the messages in the database. - self.log.debug(f"Handled LinkedIn message edit {message.entity_urn} -> {event_ids}") - await DBMessage.bulk_create( - li_message_urn=message.entity_urn, - li_thread_urn=self.li_thread_urn, - li_sender_urn=sender.li_member_urn, - li_receiver_urn=self.li_receiver_urn, - mx_room=self.mxid, - timestamp=timestamp, - event_ids=event_ids, - ) - await self._send_delivery_receipt(event_ids[-1]) - - async def _handle_reaction_summary( - self, - li_message_urn: URN, - source: "u.User", - reaction_event_id: EventID, - reaction_summary: ReactionSummary, - timestamp: datetime | None, - ) -> list[EventID]: - if not reaction_summary.emoji or not source.client: - return [] - - assert self.mxid - assert self.li_receiver_urn - - emoji = reaction_summary.emoji - reactors = await source.client.get_reactors(li_message_urn, emoji) - - mxids = [] - for reactor in reactors.elements: - sender = await p.Puppet.get_by_li_member_urn(reactor.reactor_urn) - intent = sender.intent_for(self) - - mxid = await intent.react( - self.mxid, reaction_event_id, reaction_summary.emoji, timestamp=timestamp - ) - mxids.append(mxid) - - self.log.debug( - f"{sender.mxid} reacted to {reaction_event_id} with " - f"{reaction_summary.emoji}, got {mxid}." 
- ) - - await DBReaction( - mxid=mxid, - mx_room=self.mxid, - li_message_urn=li_message_urn, - li_receiver_urn=self.li_receiver_urn, - li_sender_urn=sender.li_member_urn, - reaction=reaction_summary.emoji, - ).insert() - - return mxids - - async def _convert_linkedin_attachments( - self, - source: "u.User", - intent: IntentAPI, - attachments: list[MessageAttachment], - ) -> list[ConvertedMessage]: - converted = [] - for attachment in attachments: - if not attachment.reference: - continue - url = attachment.reference.string - if not url: - continue - - msgtype = MessageType.FILE - if attachment.media_type.startswith("image/"): - msgtype = MessageType.IMAGE - else: - msgtype = MessageType.FILE - - mxc, info, decryption_info = await self._reupload_linkedin_file( - url, source, intent, encrypt=self.encrypted, find_size=True - ) - content = MediaMessageEventContent( - url=mxc, - file=decryption_info, - info=info, - msgtype=msgtype, - body=attachment.name, - ) - converted.append((EventType.ROOM_MESSAGE, content)) - - return converted - - async def _convert_linkedin_media_attachments( - self, - source: "u.User", - intent: IntentAPI, - media_attachments: list[MediaAttachment], - ) -> list[ConvertedMessage]: - converted = [] - for attachment in media_attachments: - if attachment.media_type == "AUDIO": - if attachment.audio_metadata is None: - content = TextMessageEventContent( - msgtype=MessageType.NOTICE, - body="Unsupported audio message. 
No metadata found!", - ) - else: - url = attachment.audio_metadata.url - mxc, info, decryption_info = await self._reupload_linkedin_file( - url, source, intent, encrypt=self.encrypted - ) - info["duration"] = attachment.audio_metadata.duration - content = MediaMessageEventContent( - url=mxc, - file=decryption_info, - info=info, - msgtype=MessageType.AUDIO, - body="Voice message", - ) - content["org.matrix.msc1767.audio"] = { - "duration": attachment.audio_metadata.duration, - } - content["org.matrix.msc3245.voice"] = {} - else: - content = TextMessageEventContent( - msgtype=MessageType.NOTICE, - body=f"Unsupported media type {attachment.media_type}", - ) - converted.append((EventType.ROOM_MESSAGE, content)) - - return converted - - async def _convert_linkedin_third_party_media( - self, - source: "u.User", - intent: IntentAPI, - third_party_media: ThirdPartyMedia, - ) -> list[ConvertedMessage]: - if not third_party_media: - return [] - - if third_party_media.media_type == "TENOR_GIF": - if not third_party_media.media or not third_party_media.media.gif: - return [] - msgtype = MessageType.IMAGE - mxc, info, decryption_info = await self._reupload_linkedin_file( - third_party_media.media.gif.url, - source, - intent, - encrypt=self.encrypted, - width=third_party_media.media.gif.original_width, - height=third_party_media.media.gif.original_height, - ) - content = MediaMessageEventContent( - url=mxc, - file=decryption_info, - info=info, - msgtype=msgtype, - ) - return [(EventType.ROOM_MESSAGE, content)] - - self.log.warning(f"Unsupported third party media: {third_party_media}.") - return [] - - @classmethod - async def _reupload_linkedin_file( - cls, - url: str, - source: "u.User", - intent: IntentAPI, - *, - filename: str | None = None, - encrypt: bool = False, - find_size: bool = False, - width: int | None = None, - height: int | None = None, - ) -> tuple[ContentURI, MediaInfo, EncryptedFile | None]: - if not url: - raise ValueError("URL not provided") - - assert 
source.client - - file_data = await source.client.download_linkedin_media(url) - if len(file_data) > cls.matrix.media_config.upload_size: - raise ValueError("File not available: too large") - - mime = magic.from_buffer(file_data, mime=True) - - info = FileInfo(mimetype=mime, size=len(file_data)) - if Image and mime.startswith("image/"): - if (width is None or height is None) and find_size: - with Image.open(BytesIO(file_data)) as img: - width, height = img.size - if width and height: - info = ImageInfo( - mimetype=mime, - size=len(file_data), - width=width, - height=height, - ) - - upload_mime_type = mime - decryption_info = None - if encrypt and encrypt_attachment: - file_data, decryption_info = encrypt_attachment(file_data) - upload_mime_type = "application/octet-stream" - filename = None - url = await intent.upload_media( - file_data, - mime_type=upload_mime_type, - filename=filename, - async_upload=cls.config["homeserver.async_media"], - ) - if decryption_info: - decryption_info.url = url - return url, info, decryption_info - - async def handle_linkedin_reaction_add( - self, source: "u.User", sender: "p.Puppet", event: RealTimeEventStreamEvent - ): - if not event.event_urn or not self.li_receiver_urn or not event.reaction_summary: - return - reaction = event.reaction_summary.emoji - # Make up a URN for the reacton for dedup purposes - dedup_id = URN(f"({event.event_urn.id_str()},{sender.li_member_urn.id_str()},{reaction})") - async with self.optional_send_lock(sender.li_member_urn): - if dedup_id in self._dedup: - return - self._dedup.appendleft(dedup_id) - - # Check database for duplicates - dbr = await DBReaction.get_by_li_message_urn_and_emoji( - event.event_urn, - self.li_receiver_urn, - sender.li_member_urn, - reaction, - ) - if dbr: - self.log.debug( - f"Not handling reaction {reaction} to {event.event_urn}, found " - "duplicate in database." 
- ) - return - - if not await self._bridge_own_message_pm(source, sender, f"reaction to {event.event_urn}"): - return - - intent = sender.intent_for(self) - - message = await DBMessage.get_by_li_message_urn(event.event_urn, self.li_receiver_urn) - if not message: - self.log.debug(f"Ignoring reaction to unknown message {event.event_urn}") - return - - mxid = await intent.react(message.mx_room, message.mxid, reaction) - self.log.debug(f"Reacted to {message.mxid}, got {mxid}") - - await DBReaction( - mxid=mxid, - mx_room=message.mx_room, - li_message_urn=message.li_message_urn, - li_receiver_urn=self.li_receiver_urn, - li_sender_urn=sender.li_member_urn, - reaction=reaction, - ).insert() - self._dedup.remove(dedup_id) - - async def handle_linkedin_reaction_remove( - self, source: "u.User", sender: "p.Puppet", event: RealTimeEventStreamEvent - ): - if ( - not self.mxid - or not self.li_receiver_urn - or not event.event_urn - or not event.reaction_summary - ): - return - reaction = await DBReaction.get_by_li_message_urn_and_emoji( - event.event_urn, - self.li_receiver_urn, - sender.li_member_urn, - event.reaction_summary.emoji, - ) - if reaction: - try: - await sender.intent_for(self).redact(reaction.mx_room, reaction.mxid) - except MForbidden: - await self.main_intent.redact(reaction.mx_room, reaction.mxid) - await reaction.delete() - - async def handle_linkedin_conversation_read(self, source: "u.User"): - most_recent = await DBMessage.get_most_recent(self.li_thread_urn, self.li_receiver_urn) - if not most_recent: - return - puppet = await source.bridge.get_double_puppet(source.mxid) - if puppet and puppet.is_real_user: - await puppet.intent.mark_read(self.mxid, most_recent.mxid) - - async def handle_linkedin_seen_receipt( - self, source: "u.User", sender: "p.Puppet", event: RealTimeEventStreamEvent - ): - if messages := await DBMessage.get_all_by_li_message_urn( - event.seen_receipt.event_urn, self.li_receiver_urn - ): - messages.sort(key=lambda m: m.index) - await 
sender.intent.mark_read(self.mxid, messages[-1].mxid) - - async def handle_linkedin_typing(self, sender: "p.Puppet"): - await sender.intent.set_typing(self.mxid) - - # endregion diff --git a/linkedin_matrix/puppet.py b/linkedin_matrix/puppet.py deleted file mode 100644 index c3ede2f..0000000 --- a/linkedin_matrix/puppet.py +++ /dev/null @@ -1,319 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, AsyncGenerator, AsyncIterable, Awaitable, cast -from datetime import datetime -import re - -from yarl import URL -import aiohttp -import magic - -from linkedin_messaging import URN -from linkedin_messaging.api_objects import MessagingMember, Picture -from mautrix.appservice import IntentAPI -from mautrix.bridge import BasePuppet, async_getter_lock -from mautrix.types import ContentURI, SyncToken, UserID -from mautrix.util.simple_template import SimpleTemplate - -from . import matrix as m, portal as p, user as u -from .config import Config -from .db import Puppet as DBPuppet - -if TYPE_CHECKING: - from .__main__ import LinkedInBridge - - -class Puppet(DBPuppet, BasePuppet): - bridge: LinkedInBridge - mx: m.MatrixHandler - config: Config - hs_domain: str - mxid_template: SimpleTemplate[str] - - by_li_member_urn: dict[URN, "Puppet"] = {} - by_custom_mxid: dict[UserID, "Puppet"] = {} - - session: aiohttp.ClientSession - - def __init__( - self, - li_member_urn: URN, - name: str | None = None, - photo_id: str | None = None, - photo_mxc: ContentURI | None = None, - name_set: bool = False, - avatar_set: bool = False, - contact_info_set: bool = False, - is_registered: bool = False, - custom_mxid: UserID | None = None, - access_token: str | None = None, - next_batch: SyncToken | None = None, - base_url: URL | None = None, - ): - super().__init__( - li_member_urn, - name, - photo_id, - photo_mxc, - custom_mxid, - access_token, - next_batch, - base_url, - name_set, - avatar_set, - contact_info_set, - is_registered, - ) - self._last_info_sync: datetime | 
None = None - - self.default_mxid = self.get_mxid_from_id(li_member_urn) - self.default_mxid_intent = self.az.intent.user(self.default_mxid) - self.intent = self._fresh_intent() - - self.log = self.log.getChild(str(self.li_member_urn)) - - @classmethod - def init_cls(cls, bridge: "LinkedInBridge") -> AsyncIterable[Awaitable[None]]: - cls.bridge = bridge - cls.config = bridge.config - cls.loop = bridge.loop - cls.mx = bridge.matrix - cls.az = bridge.az - cls.hs_domain = cls.config["homeserver.domain"] - cls.mxid_template = SimpleTemplate( - cls.config["bridge.username_template"], - "userid", - prefix="@", - suffix=f":{Puppet.hs_domain}", - type=str, - ) - cls.sync_with_custom_puppets = cls.config["bridge.sync_with_custom_puppets"] - cls.homeserver_url_map = { - server: URL(url) - for server, url in cls.config["bridge.double_puppet_server_map"].items() - } - cls.allow_discover_url = cls.config["bridge.double_puppet_allow_discovery"] - cls.login_shared_secret_map = { - server: secret.encode("utf-8") - for server, secret in cls.config["bridge.login_shared_secret_map"].items() - } - cls.login_device_name = "LinkedIn Messages Bridge" - cls.session = aiohttp.ClientSession() - - return (puppet.try_start() async for puppet in Puppet.get_all_with_custom_mxid()) - - @classmethod - async def close(cls): - await cls.session.close() - - def intent_for(self, portal: "p.Portal") -> IntentAPI: - if portal.li_other_user_urn == self.li_member_urn or ( - portal.backfill_lock.locked and self.config["bridge.backfill.invite_own_puppet"] - ): - return self.default_mxid_intent - return self.intent - - # region User info updating - - async def update_info( - self, - source: u.User | None, - info: MessagingMember, - update_avatar: bool = True, - ) -> "Puppet": - assert source - - self._last_info_sync = datetime.now() - try: - changed = await self._update_contact_info(info) - changed = await self._update_name(info) or changed - if update_avatar: - photo = info.alternate_image or ( - 
info.mini_profile.picture if info.mini_profile else None - ) - changed = await self._update_photo(photo) or changed - - if changed: - await self.save() - except Exception: - self.log.exception(f"Failed to update info from source {source.li_member_urn}") - return self - - async def _update_contact_info(self, info: MessagingMember, force: bool = False) -> bool: - if not self.bridge.homeserver_software.is_hungry: - return False - - if self.contact_info_set and not force: - return False - - try: - identifiers = [] - if info.mini_profile: - identifiers.append(f"linkedin:{info.mini_profile.public_identifier}") - await self.default_mxid_intent.beeper_update_profile( - { - "com.beeper.bridge.identifiers": identifiers, - "com.beeper.bridge.remote_id": str(self.li_member_urn), - "com.beeper.bridge.service": self.bridge.beeper_service_name, - "com.beeper.bridge.network": self.bridge.beeper_network_name, - } - ) - self.contact_info_set = True - except Exception: - self.log.exception("Error updating contact info") - self.contact_info_set = False - return True - - async def reupload_avatar(self, intent: IntentAPI, url: str) -> ContentURI: - async with self.session.get(url) as req: - if not req.ok: - raise Exception(f"Couldn't download avatar for {self.li_member_urn}: {url}") - - image_data = await req.content.read() - mime = magic.from_buffer(image_data, mime=True) - return await intent.upload_media( - image_data, mime_type=mime, async_upload=self.config["homeserver.async_media"] - ) - - async def _update_name(self, info: MessagingMember) -> bool: - name = self._get_displayname(info) - if name != self.name or not self.name_set: - self.name = name - try: - await self.default_mxid_intent.set_displayname(self.name) - self.name_set = True - except Exception: - self.log.exception("Failed to set displayname") - self.name_set = False - return True - return False - - @classmethod - def _get_displayname(cls, info: MessagingMember) -> str: - if not info.mini_profile: - raise 
Exception(f"No mini_profile found for {info.entity_urn}") - first, last = info.mini_profile.first_name, info.mini_profile.last_name - info_map = { - "displayname": info.alternate_name, - "name": info.alternate_name or f"{first} {last}", - "first_name": info.alternate_name or first, - "last_name": last or "", - } - for preference in cls.config["bridge.displayname_preference"]: - pref = info_map.get(preference) - if pref: - info_map["displayname"] = pref - break - return cls.config["bridge.displayname_template"].format(**info_map) - - photo_id_re = re.compile(r"https://.*?/image/(.*?)/(profile|spinmail)-.*?") - - async def _update_photo(self, picture: Picture | None) -> bool: - photo_id = None - if picture and (vi := picture.vector_image): - match = self.photo_id_re.match(vi.root_url) - # Handle InMail pictures which don't have any root_url - if not match and len(vi.artifacts) > 0: - match = self.photo_id_re.match(vi.artifacts[0].file_identifying_url_path_segment) - if match: - photo_id = match.group(1) - - if photo_id != self.photo_id or not self.avatar_set: - self.photo_id = photo_id - - if photo_id and picture and (vi := picture.vector_image): - largest_artifact = vi.artifacts[-1] - self.photo_mxc = await self.reupload_avatar( - self.default_mxid_intent, - (vi.root_url + largest_artifact.file_identifying_url_path_segment), - ) - else: - self.photo_mxc = ContentURI("") - - try: - await self.default_mxid_intent.set_avatar_url(self.photo_mxc) - self.avatar_set = True - except Exception: - self.log.exception("Failed to set avatar") - self.avatar_set = False - - return True - return False - - # endregion - - # region Database getters - - def _add_to_cache(self): - self.by_li_member_urn[self.li_member_urn] = self - if self.custom_mxid: - self.by_custom_mxid[self.custom_mxid] = self - - @classmethod - @async_getter_lock - async def get_by_li_member_urn( - cls, - li_member_urn: URN, - *, - create: bool = True, - ) -> Puppet | None: - try: - return 
cls.by_li_member_urn[li_member_urn] - except KeyError: - pass - - puppet = cast(Puppet | None, await super().get_by_li_member_urn(li_member_urn)) - if puppet: - puppet._add_to_cache() - return puppet - - if create: - puppet = cls(li_member_urn, None, None, None, False, False) - await puppet.insert() - puppet._add_to_cache() - return puppet - - return None - - @classmethod - async def get_by_mxid(cls, mxid: UserID, create: bool = True) -> Puppet | None: - li_member_urn = cls.get_id_from_mxid(mxid) - if li_member_urn: - return await cls.get_by_li_member_urn(li_member_urn, create=create) - return None - - @classmethod - @async_getter_lock - async def get_by_custom_mxid(cls, mxid: UserID) -> Puppet | None: - try: - return cls.by_custom_mxid[mxid] - except KeyError: - pass - - puppet = cast("Puppet", await super().get_by_custom_mxid(mxid)) - if puppet: - puppet._add_to_cache() - return puppet - - return None - - @classmethod - async def get_all_with_custom_mxid(cls) -> AsyncGenerator["Puppet", None]: - puppets = await super().get_all_with_custom_mxid() - for puppet in cast(list[Puppet], puppets): - try: - yield cls.by_li_member_urn[puppet.li_member_urn] - except KeyError: - puppet._add_to_cache() - yield puppet - - @classmethod - def get_id_from_mxid(cls, mxid: UserID) -> URN | None: - parsed = cls.mxid_template.parse(mxid) - return URN(parsed) if parsed else None - - @classmethod - def get_mxid_from_id(cls, li_member_urn: URN) -> UserID: - return UserID(cls.mxid_template.format_full(li_member_urn.id_str())) - - # endregion diff --git a/linkedin_matrix/user.py b/linkedin_matrix/user.py deleted file mode 100644 index ab3f533..0000000 --- a/linkedin_matrix/user.py +++ /dev/null @@ -1,697 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, AsyncGenerator, AsyncIterable, Awaitable, Optional, cast -from asyncio.futures import Future -from datetime import datetime -import asyncio -import sys -import time - -from aiohttp.client_exceptions import 
ServerConnectionError, TooManyRedirects - -from linkedin_messaging import URN, LinkedInMessaging -from linkedin_messaging.api_objects import ( - Conversation, - ConversationEvent, - ReactionSummary, - RealTimeEventStreamEvent, - UserProfileResponse, -) -from mautrix.bridge import BaseUser, async_getter_lock -from mautrix.errors import MNotFound -from mautrix.types import EventType, PushActionType, PushRuleKind, PushRuleScope, RoomID, UserID -from mautrix.util.bridge_state import BridgeState, BridgeStateEvent -from mautrix.util.opt_prometheus import Gauge, Summary, async_time -from mautrix.util.simple_lock import SimpleLock - -from . import portal as po, puppet as pu -from .config import Config -from .db import Cookie, HttpHeader, User as DBUser - -if TYPE_CHECKING: - from .__main__ import LinkedInBridge - -METRIC_CONNECTED = Gauge("bridge_connected", "Bridge users connected to LinkedIn") -METRIC_LOGGED_IN = Gauge("bridge_logged_in", "Users logged into the bridge") -METRIC_SYNC_THREADS = Summary("bridge_sync_threads", "calls to sync_threads") - - -class User(DBUser, BaseUser): - shutdown: bool = False - config: Config - - user_profile_cache: UserProfileResponse | None = None - - by_mxid: dict[UserID, "User"] = {} - by_li_member_urn: dict[URN, "User"] = {} - - listen_task: asyncio.Task | None - - _is_connected: bool | None - _is_logged_in: bool | None - _is_logging_out: bool | None - _is_refreshing: bool - _notice_room_lock: asyncio.Lock - _notice_send_lock: asyncio.Lock - _sync_lock: SimpleLock - is_admin: bool - - client: LinkedInMessaging | None = None - - def __init__( - self, - mxid: UserID, - li_member_urn: URN | None = None, - notice_room: RoomID | None = None, - space_mxid: RoomID | None = None, - ): - super().__init__(mxid, li_member_urn, notice_room, space_mxid) - BaseUser.__init__(self) - self._notice_room_lock = asyncio.Lock() - self._notice_send_lock = asyncio.Lock() - - self.command_status = None - ( - self.is_whitelisted, - self.is_admin, - 
self.permission_level, - ) = self.config.get_permissions(mxid) - self._is_logged_in = None - self._is_logging_out = None - self._is_connected = None - self._connection_time = time.monotonic() - self._prev_thread_sync = -10 - self._prev_reconnect_fail_refresh = time.monotonic() - self._community_id = None - self._sync_lock = SimpleLock( - "Waiting for thread sync to finish before handling %s", log=self.log - ) - self._is_refreshing = False - - self.log = self.log.getChild(self.mxid) - - self.listen_task = None - - @classmethod - def init_cls(cls, bridge: LinkedInBridge) -> AsyncIterable[Awaitable[bool]]: - cls.bridge = bridge - cls.config = bridge.config - cls.az = bridge.az - cls.loop = bridge.loop - cls.temp_disconnect_notices = bridge.config["bridge.temporary_disconnect_notices"] - return (user.load_session(is_startup=True) async for user in cls.all_logged_in()) - - @property - def is_connected(self) -> bool | None: - return self._is_connected - - @is_connected.setter - def is_connected(self, val: bool | None): - if self._is_connected != val: - self._is_connected = val - self._connection_time = time.monotonic() - - # region Database getters - - def _add_to_cache(self): - self.by_mxid[self.mxid] = self - if self.li_member_urn: - self.by_li_member_urn[self.li_member_urn] = self - - @classmethod - async def all_logged_in(cls) -> AsyncGenerator["User", None]: - users = await super().all_logged_in() - for user in cast(list["User"], users): - try: - yield cls.by_mxid[user.mxid] - except KeyError: - user._add_to_cache() - yield user - - @classmethod - @async_getter_lock - async def get_by_mxid( - cls, - mxid: UserID, - *, - create: bool = True, - ) -> User | None: - if pu.Puppet.get_id_from_mxid(mxid) or mxid == cls.az.bot_mxid: - return None - try: - return cls.by_mxid[mxid] - except KeyError: - pass - - user = cast("User", await super().get_by_mxid(mxid)) - if user is not None: - user._add_to_cache() - return user - - if create: - cls.log.debug(f"Creating user 
instance for {mxid}") - user = cls(mxid) - await user.insert() - user._add_to_cache() - return user - - return None - - @classmethod - @async_getter_lock - async def get_by_li_member_urn(cls, li_member_urn: URN) -> User | None: - try: - return cls.by_li_member_urn[li_member_urn] - except KeyError: - pass - - user = cast("User", await super().get_by_li_member_urn(li_member_urn)) - if user is not None: - user._add_to_cache() - return user - - return None - - async def get_puppet(self) -> pu.Puppet | None: - if not self.li_member_urn: - return None - return await pu.Puppet.get_by_li_member_urn(self.li_member_urn) - - async def get_portal_with(self, puppet: pu.Puppet, create: bool = True) -> po.Portal | None: - # We should probably make this work eventually, but for now, creating chats will just not - # work. - return None - - # endregion - - # region Session Management - - async def load_session(self, is_startup: bool = False) -> bool: - if self._is_logged_in and is_startup: - return True - cookies = await Cookie.get_for_mxid(self.mxid) - cookie_names = set(c.name for c in cookies) - if "li_at" not in cookie_names or "JSESSIONID" not in cookie_names: - await self.push_bridge_state(BridgeStateEvent.BAD_CREDENTIALS, error="logged-out") - return False - - self.client = LinkedInMessaging.from_cookies_and_headers( - {c.name: c.value for c in cookies}, - {h.name: h.value for h in await HttpHeader.get_for_mxid(self.mxid)}, - ) - - backoff = 1.0 - while True: - try: - self.user_profile_cache = await self.client.get_user_profile() - break - except (TooManyRedirects, ServerConnectionError) as e: - self.log.info(f"Failed to get user profile: {e}") - await self.push_bridge_state(BridgeStateEvent.BAD_CREDENTIALS, message=str(e)) - return False - except Exception as e: - self.log.exception("Failed to get user profile") - time.sleep(backoff) - backoff *= 2 - if backoff > 64: - # If we can't get the user profile and it's not due to the session being - # invalid, it's probably a 
network error. Go ahead and push the UNKNOWN_ERROR, - # and then crash the bridge. - await self.push_bridge_state(BridgeStateEvent.UNKNOWN_ERROR, message=str(e)) - sys.exit(1) - - if (mp := self.user_profile_cache.mini_profile) and mp.entity_urn: - self.li_member_urn = mp.entity_urn - else: - return False - - await self.push_bridge_state(BridgeStateEvent.CONNECTING) - - self.log.info("Loaded session successfully") - self._track_metric(METRIC_LOGGED_IN, True) - self._is_logged_in = True - self.is_connected = None - self.stop_listen() - asyncio.create_task(self.post_login()) - return True - - async def reconnect(self): - assert self.listen_task - self._is_refreshing = True - await self.listen_task - self.listen_task = None - self.start_listen() - self._is_refreshing = False - - async def is_logged_in(self) -> bool: - self.log.debug("Checking if logged in") - if not self.client: - self.log.debug("Not logged in: no client") - return False - if self._is_logged_in is None: - try: - self._is_logged_in = await self.client.logged_in() - self.log.debug("checked if client is logged in: %s", self._is_logged_in) - except Exception: - self.log.exception("Exception checking login status") - self._is_logged_in = False - self.user_profile_cache = None - return self._is_logged_in or False - - async def on_logged_in(self, cookies: dict[str, str], headers: Optional[dict[str, str]]): - cookies = {k: v.strip('"') for k, v in cookies.items()} - await Cookie.bulk_upsert(self.mxid, cookies) - if headers: - await HttpHeader.bulk_upsert(self.mxid, headers) - self.client = LinkedInMessaging.from_cookies_and_headers(cookies, headers) - self.listener_event_handlers_created = False - self.user_profile_cache = await self.client.get_user_profile() - if (mp := self.user_profile_cache.mini_profile) and mp.entity_urn: - self.li_member_urn = mp.entity_urn - else: - raise Exception("No mini_profile.entity_urn on the user profile!") - await self.push_bridge_state(BridgeStateEvent.CONNECTING) - await 
self.save() - self.stop_listen() - await self.load_session() - - async def post_login(self): - self.log.info("Running post-login actions") - self._add_to_cache() - - try: - puppet = await pu.Puppet.get_by_li_member_urn(self.li_member_urn) - - if puppet.custom_mxid != self.mxid and puppet.can_auto_login(self.mxid): - self.log.info("Automatically enabling custom puppet") - await puppet.switch_mxid(access_token="auto", mxid=self.mxid) - except Exception: - self.user_profile_cache = None - self.log.exception("Failed to automatically enable custom puppet") - - await self._create_or_update_space() - await self.sync_threads() - self.start_listen() - - async def logout(self): - self.log.info("Logging out") - self._is_logged_in = False - self._is_logging_out = True - self.stop_listen() - if self.client: - self.log.info("Logging out the client.") - await self.client.logout() - await self.push_bridge_state(BridgeStateEvent.LOGGED_OUT) - self._prev_connected_bridge_state = None - puppet = await pu.Puppet.get_by_li_member_urn(self.li_member_urn, create=False) - if puppet and puppet.is_real_user: - await puppet.switch_mxid(None, None) - if self.li_member_urn: - try: - del self.by_li_member_urn[self.li_member_urn] - except KeyError: - pass - await Cookie.delete_all_for_mxid(self.mxid) - await HttpHeader.delete_all_for_mxid(self.mxid) - self._track_metric(METRIC_LOGGED_IN, True) - self.client = None - self.listener_event_handlers_created = False - self.user_profile_cache = None - self.li_member_urn = None - self.notice_room = None - await self.save() - self._is_logging_out = False - - # endregion - - # Spaces support - - async def _create_or_update_space(self): - if not self.config["bridge.space_support.enable"]: - return - - avatar_state_event_content = {"url": self.config["appservice.bot_avatar"]} - name_state_event_content = {"name": self.config["bridge.space_support.name"]} - - if self.space_mxid: - await self.az.intent.send_state_event( - self.space_mxid, 
EventType.ROOM_AVATAR, avatar_state_event_content - ) - await self.az.intent.send_state_event( - self.space_mxid, EventType.ROOM_NAME, name_state_event_content - ) - else: - self.log.debug(f"Creating space for {self.li_member_urn}, inviting {self.mxid}") - room = await self.az.intent.create_room( - is_direct=False, - invitees=[self.mxid], - creation_content={"type": "m.space"}, - initial_state=[ - { - "type": str(EventType.ROOM_NAME), - "content": name_state_event_content, - }, - { - "type": str(EventType.ROOM_AVATAR), - "content": avatar_state_event_content, - }, - ], - ) - self.space_mxid = room - await self.save() - self.log.debug(f"Created space {room}") - try: - await self.az.intent.ensure_joined(room) - except Exception: - self.log.warning(f"Failed to add bridge bot to new space {room}") - - # endregion - - # region Thread Syncing - - async def get_direct_chats(self) -> dict[UserID, list[RoomID]]: - assert self.li_member_urn - return { - pu.Puppet.get_mxid_from_id(portal.li_other_user_urn): [portal.mxid] - async for portal in po.Portal.get_all_by_li_receiver_urn(self.li_member_urn) - if portal.mxid and portal.li_other_user_urn - } - - @async_time(METRIC_SYNC_THREADS) - async def sync_threads(self): - if self._prev_thread_sync + 10 > time.monotonic(): - self.log.debug("Previous thread sync was less than 10 seconds ago, not re-syncing") - return - self._prev_thread_sync = time.monotonic() - try: - await self._sync_threads() - except Exception: - self.log.exception("Failed to sync threads") - - async def _sync_threads(self): - assert self.client - sync_count = self.config["bridge.initial_chat_sync"] - if sync_count <= 0: - return - - self.log.debug("Fetching threads...") - await self.push_bridge_state(BridgeStateEvent.BACKFILLING) - - last_activity_before = datetime.now() - synced_threads = 0 - while True: - if synced_threads >= sync_count: - break - conversations_response = await self.client.get_conversations( - last_activity_before=last_activity_before - ) - 
for conversation in conversations_response.elements: - if synced_threads >= sync_count: - break - try: - await self._sync_thread(conversation) - except Exception: - self.user_profile_cache = None - self.log.exception(f"Failed to sync thread {conversation.entity_urn}") - synced_threads += 1 - - await self.update_direct_chats() - - # The page size is 20, by default, so if we get less than 20, we are at the - # end of the list so we should stop. - if len(conversations_response.elements) < 20: - break - - if last_activity_at := conversations_response.elements[-1].last_activity_at: - last_activity_before = last_activity_at - else: - break - - await self.update_direct_chats() - - async def _sync_thread(self, conversation: Conversation): - self.log.debug(f"Syncing thread {conversation.entity_urn}") - - li_other_user_urn = None - if not conversation.group_chat: - other_user = conversation.participants[0] - if (mm := other_user.messaging_member) and (mp := mm.mini_profile) and mp.entity_urn: - li_other_user_urn = mp.entity_urn - if li_other_user_urn == URN("UNKNOWN"): - li_other_user_urn = conversation.entity_urn - else: - raise Exception("Other chat participant didn't have an entity_urn!") - - portal = await po.Portal.get_by_li_thread_urn( - conversation.entity_urn, - li_receiver_urn=self.li_member_urn, - li_is_group_chat=conversation.group_chat, - li_other_user_urn=li_other_user_urn, - ) - assert portal - portal = cast(po.Portal, portal) - - was_created = False - if not portal.mxid: - await portal.create_matrix_room(self, conversation) - was_created = True - else: - await portal.update_matrix_room(self, conversation) - await portal.backfill(self, conversation, is_initial=False) - if was_created or not self.config["bridge.tag_only_on_create"]: - await self._mute_room(portal, conversation.muted) - - async def _mute_room(self, portal: po.Portal, muted: bool): - if not self.config["bridge.mute_bridging"] or not portal or not portal.mxid: - return - puppet = await 
pu.Puppet.get_by_custom_mxid(self.mxid) - if not puppet or not puppet.is_real_user: - return - if muted: - await puppet.intent.set_push_rule( - PushRuleScope.GLOBAL, - PushRuleKind.ROOM, - portal.mxid, - actions=[PushActionType.DONT_NOTIFY], - ) - else: - try: - await puppet.intent.remove_push_rule( - PushRuleScope.GLOBAL, PushRuleKind.ROOM, portal.mxid - ) - except MNotFound: - pass - - # endregion - - # region Listener and State Management - - async def fill_bridge_state(self, state: BridgeState): - await super().fill_bridge_state(state) - if not self.li_member_urn: - return - state.remote_id = self.li_member_urn.get_id() - state.remote_name = "" - user = await User.get_by_li_member_urn(self.li_member_urn) - if user and user.client: - try: - user_profile = user.user_profile_cache - if user_profile is not None: - self.log.debug("Cache hit on user_profile_cache") - user_profile = user_profile or await user.client.get_user_profile() - if mp := user_profile.mini_profile: - state.remote_name = " ".join(n for n in [mp.first_name, mp.last_name] if n) - except Exception: - self.user_profile_cache = None - pass - - def stop_listen(self): - self.log.info("Stopping the listener.") - if self.listen_task: - self.log.info("Cancelling the listen task.") - self.listen_task.cancel() - self.listen_task = None - - def on_listen_task_end(self, future: Future): - if future.cancelled(): - self.log.info("Listener task cancelled") - if self.client and self._is_logged_in and not self.shutdown: - self.start_listen() - else: - # This most likely means that the bridge is being stopped/restarted. But, - # occasionally, the user gets logged out. In these cases, we want to reset - # _is_logged_in so the next whoami call does a full call out to LinkedIn to - # detect whether the user is logged in. - self.log.warn("No client, not logged in, or shutdown. 
Not reconnecting.") - if ( - not self._is_logged_in - and not self._is_logging_out - and self.client - and not self.shutdown - ): - self._track_metric(METRIC_CONNECTED, False) - self.log.warn("Logged out, but not by a logout call, sending bad credentials.") - asyncio.create_task(self.push_bridge_state(BridgeStateEvent.BAD_CREDENTIALS)) - future.cancel() - - listener_event_handlers_created: bool = False - listener_task_i: int = 0 - - def start_listen(self): - self.log.info("Starting listener task.") - self.listen_task = asyncio.create_task( - self._try_listen(), - name=f"listener task #{self.listener_task_i}", - ) - self.listen_task.add_done_callback(self.on_listen_task_end) - - _prev_connected_bridge_state: float | None = None - - async def _try_listen(self): - self.log.info("Trying to start the listener") - if not self.client: - self.log.error("No client, cannot start listener!") - return - if not self.listener_event_handlers_created: - self.log.info("Adding listeners to client") - self.client.add_event_listener("ALL_EVENTS", self.handle_linkedin_stream_event) - self.client.add_event_listener("event", self.handle_linkedin_event) - self.client.add_event_listener("reactionAdded", self.handle_linkedin_reaction_added) - self.client.add_event_listener("action", self.handle_linkedin_action) - self.client.add_event_listener("fromEntity", self.handle_linkedin_from_entity) - self.listener_event_handlers_created = True - try: - await self.client.start_listener(self.li_member_urn) - # Make sure all of the cookies are up-to-date - await Cookie.bulk_upsert(self.mxid, self.client.cookies()) - except Exception as e: - self.log.exception(f"Exception in listener: {e}") - self._prev_connected_bridge_state = None - self._track_metric(METRIC_CONNECTED, False) - self.user_profile_cache = None - - if isinstance(e, TooManyRedirects): - # This means that the user's session is borked (the redirects mean it's trying to - # redirect to the login page). 
- self._is_logged_in = False - self._is_connected = False - else: - await self.push_bridge_state(BridgeStateEvent.TRANSIENT_DISCONNECT, message=str(e)) - await asyncio.sleep(5) - - async def _push_connected_state(self): - if ( - # We haven't sent a CONNECTED state ever. - not self._prev_connected_bridge_state - # We haven't sent a CONNECTED state in the last 12 hours. - or self._prev_connected_bridge_state + (12 * 60 * 60) < time.monotonic() - ): - await self.push_bridge_state( - BridgeStateEvent.CONNECTED, - info={"using_headers_from_user": self.client.using_headers_from_user}, - ) - self._prev_connected_bridge_state = time.monotonic() - else: - self.log.trace("Event received on event stream, but not sending CONNECTED") - - async def handle_linkedin_stream_event(self, _): - self._track_metric(METRIC_CONNECTED, True) - await self._push_connected_state() - - async def handle_linkedin_event(self, event: RealTimeEventStreamEvent): - assert self.client - assert isinstance(event.event, ConversationEvent) - assert event.event.entity_urn - - thread_urn, message_urn = map(URN, event.event.entity_urn.id_parts) - if ( - (e := event.event) - and (f := e.from_) - and (mm := f.messaging_member) - and (mp := mm.mini_profile) - and (entity_urn := mp.entity_urn) - ): - sender_urn = entity_urn - else: - raise Exception("Invalid sender: no entity_urn found!", event) - - portal = await po.Portal.get_by_li_thread_urn( - thread_urn, li_receiver_urn=self.li_member_urn, create=False - ) - if not portal: - conversations = await self.client.get_conversations() - for conversation in conversations.elements: - if conversation.entity_urn == thread_urn: - await self._sync_thread(conversation) - break - - # Nothing more to do, since the backfill should handle the message coming - # in. 
- return - - puppet = await pu.Puppet.get_by_li_member_urn(sender_urn) - - await portal.backfill_lock.wait(message_urn) - await portal.handle_linkedin_message(self, puppet, event.event) - - async def handle_linkedin_reaction_added(self, event: RealTimeEventStreamEvent): - assert isinstance(event.reaction_summary, ReactionSummary) - assert isinstance(event.reaction_added, bool) - assert isinstance(event.actor_mini_profile_urn, URN) - assert isinstance(event.event_urn, URN) - - thread_urn, message_urn = map(URN, event.event_urn.id_parts) - - portal = await po.Portal.get_by_li_thread_urn( - thread_urn, li_receiver_urn=self.li_member_urn, create=False - ) - if not portal: - conversations = await self.client.get_conversations() - for conversation in conversations.elements: - if conversation.entity_urn == thread_urn: - await self._sync_thread(conversation) - break - - # Nothing more to do, since the backfill should handle the message coming - # in. - return - - puppet = await pu.Puppet.get_by_li_member_urn(event.actor_mini_profile_urn) - - await portal.backfill_lock.wait(message_urn) - if event.reaction_added: - await portal.handle_linkedin_reaction_add(self, puppet, event) - else: - await portal.handle_linkedin_reaction_remove(self, puppet, event) - - async def handle_linkedin_action(self, event: RealTimeEventStreamEvent): - if event.action != "UPDATE": - return - if ( - (raw_conversation := event.conversation) - and isinstance(raw_conversation, dict) - and (conversation := Conversation.from_dict(raw_conversation)) - and conversation.read - ): - if portal := await po.Portal.get_by_li_thread_urn( - conversation.entity_urn, li_receiver_urn=self.li_member_urn, create=False - ): - await portal.handle_linkedin_conversation_read(self) - - async def handle_linkedin_from_entity(self, event: RealTimeEventStreamEvent): - if seen_receipt := event.seen_receipt: - conversation_urn = URN(seen_receipt.event_urn.id_parts[0]) - if portal := await po.Portal.get_by_li_thread_urn( - 
conversation_urn, li_receiver_urn=self.li_member_urn, create=False - ): - puppet = await pu.Puppet.get_by_li_member_urn(event.from_entity) - await portal.handle_linkedin_seen_receipt(self, puppet, event) - - if isinstance(event.conversation, str): - if portal := await po.Portal.get_by_li_thread_urn( - URN(event.conversation), li_receiver_urn=self.li_member_urn, create=False - ): - puppet = await pu.Puppet.get_by_li_member_urn(event.from_entity) - await portal.handle_linkedin_typing(puppet) - - # endregion diff --git a/linkedin_matrix/version.py b/linkedin_matrix/version.py deleted file mode 100644 index 1d13f05..0000000 --- a/linkedin_matrix/version.py +++ /dev/null @@ -1,3 +0,0 @@ -from .get_version import git_revision, git_tag, linkified_version, version - -__all__ = ("git_revision", "git_tag", "linkified_version", "version") diff --git a/linkedin_matrix/web/__init__.py b/linkedin_matrix/web/__init__.py deleted file mode 100644 index d3529b7..0000000 --- a/linkedin_matrix/web/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .provisioning_api import ProvisioningAPI - -__all__ = ("ProvisioningAPI",) diff --git a/linkedin_matrix/web/provisioning_api.py b/linkedin_matrix/web/provisioning_api.py deleted file mode 100644 index 4eb341e..0000000 --- a/linkedin_matrix/web/provisioning_api.py +++ /dev/null @@ -1,156 +0,0 @@ -from typing import Any, Awaitable -import json -import logging - -from aiohttp import web - -from mautrix.types import UserID -from mautrix.util.logging import TraceLogger - -from .. 
import user as u -from ..analytics import track - - -class ProvisioningAPI: - log: TraceLogger = logging.getLogger("mau.web.provisioning") - app: web.Application - - def __init__(self, shared_secret: str): - self.app = web.Application() - self.shared_secret = shared_secret - self.app.router.add_get("/api/whoami", self.status) - self.app.router.add_options("/api/login", self.login_options) - self.app.router.add_post("/api/login", self.login) - self.app.router.add_post("/api/logout", self.logout) - - @property - def _acao_headers(self) -> dict[str, str]: - return { - "Access-Control-Allow-Origin": "*", - "Access-Control-Allow-Headers": "Authorization, Content-Type", - "Access-Control-Allow-Methods": "POST, OPTIONS", - } - - @property - def _headers(self) -> dict[str, str]: - return { - **self._acao_headers, - "Content-Type": "application/json", - } - - async def login_options(self, _) -> web.Response: - return web.Response(status=200, headers=self._headers) - - def check_token(self, request: web.Request) -> Awaitable["u.User"]: - try: - token = request.headers["Authorization"] - token = token[len("Bearer ") :] - except KeyError: - raise web.HTTPBadRequest( - body='{"error": "Missing Authorization header"}', headers=self._headers - ) - except IndexError: - raise web.HTTPBadRequest( - body='{"error": "Malformed Authorization header"}', - headers=self._headers, - ) - if token != self.shared_secret: - raise web.HTTPForbidden(body='{"error": "Invalid token"}', headers=self._headers) - try: - user_id = request.query["user_id"] - except KeyError: - raise web.HTTPBadRequest( - body='{"error": "Missing user_id query param"}', headers=self._headers - ) - - return u.User.get_by_mxid(UserID(user_id)) - - async def status(self, request: web.Request) -> web.Response: - try: - user = await self.check_token(request) - except web.HTTPError as e: - return e - - data: dict[str, Any] = { - "permissions": user.permission_level, - "mxid": user.mxid, - "linkedin": None, - } - if await 
user.is_logged_in() and user.client: - user_profile = user.user_profile_cache - if user_profile is not None: - self.log.debug("Cache hit on user_profile_cache") - user_profile = user_profile or await user.client.get_user_profile() - data["linkedin"] = user_profile.to_dict() - - return web.json_response(data, headers=self._acao_headers) - - async def login(self, request: web.Request) -> web.Response: - try: - user = await self.check_token(request) - except web.HTTPError as e: - return e - - track(user, "$login_start") - try: - req_data = await request.json() - except json.JSONDecodeError: - return web.HTTPBadRequest(body='{"error": "Malformed JSON"}', headers=self._headers) - - cookie_dict = {} - headers = {} - - def parse_cookies(c): - for cookie in c.split("; "): - key, val = cookie.split("=", 1) - cookie_dict[key] = val - logging.info(f"Got cookies: {cookie_dict.keys()}") - - if "all_headers" in req_data: - all_headers = req_data["all_headers"] - logging.info(f"Got headers: {all_headers.keys()}") - - cookies = all_headers.pop("Cookie", all_headers.pop("cookie", None)) - if not cookies: - return web.HTTPBadRequest( - body='{"error": "Missing cookies"}', headers=self._headers - ) - - parse_cookies(cookies) - - # We never want the accept header, skip it - all_headers.pop("Accept", None) - all_headers.pop("accept", None) - - # Save the rest of the headers - headers = all_headers - elif "cookie_header" in req_data: - parse_cookies(req_data["cookie_header"]) - elif "li_at" in req_data and "JSESSIONID" in req_data: - # The request is just a dictionary of individual cookies - cookie_dict = req_data - logging.info(f"Legacy login, got cookies: {cookie_dict.keys()}") - - if "li_at" not in cookie_dict or "JSESSIONID" not in cookie_dict: - return web.HTTPBadRequest(body='{"error": "Missing keys"}', headers=self._headers) - - try: - await user.on_logged_in(cookie_dict, headers) - track(user, "$login_success") - except Exception as e: - track(user, "$login_failed", {"error": 
str(e)}) - self.log.exception("Failed to log in", exc_info=True) - return web.HTTPUnauthorized( - body='{"error": "LinkedIn authorization failed"}', headers=self._headers - ) - return web.Response(body="{}", status=200, headers=self._headers) - - async def logout(self, request: web.Request) -> web.Response: - try: - user = await self.check_token(request) - if user.client: - await user.logout() - except web.HTTPError: - pass - - return web.json_response({}, headers=self._acao_headers) diff --git a/linkedin_messaging/__init__.py b/linkedin_messaging/__init__.py deleted file mode 100644 index 280e423..0000000 --- a/linkedin_messaging/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -"""An unofficial API for interacting with LinkedIn Messaging""" - -from .api_objects import URN -from .linkedin import ChallengeException, LinkedInMessaging - -__all__ = ("ChallengeException", "LinkedInMessaging", "URN") diff --git a/linkedin_messaging/api_objects.py b/linkedin_messaging/api_objects.py deleted file mode 100644 index 7039fd1..0000000 --- a/linkedin_messaging/api_objects.py +++ /dev/null @@ -1,577 +0,0 @@ -from typing import Any, Callable, Optional, Union -from dataclasses import dataclass, field -from datetime import datetime - -from dataclasses_json import DataClassJsonMixin, LetterCase, Undefined, config, dataclass_json -import dataclasses_json - - -class URN: - def __init__(self, urn_str: str): - urn_parts = urn_str.split(":") - self.prefix = ":".join(urn_parts[:-1]) - self.id_parts = urn_parts[-1].strip("()").split(",") - - def get_id(self) -> str: - assert len(self.id_parts) == 1 - return self.id_parts[0] - - def id_str(self) -> str: - return ",".join(self.id_parts) - - def __str__(self) -> str: - return "{}:{}".format( - self.prefix, - (self.id_parts[0] if len(self.id_parts) == 1 else f"({self.id_str()})"), - ) - - def __hash__(self) -> int: - return hash(self.id_str()) - - def __eq__(self, other: Any) -> bool: - if not isinstance(other, URN): - return False - return 
self.id_parts == other.id_parts - - def __repr__(self) -> str: - return f"URN('{str(self)}')" - - -# Use milliseconds instead of seconds from the UNIX epoch. -decoder_functions = { - datetime: (lambda s: datetime.utcfromtimestamp(int(s) / 1000) if s else None), - URN: (lambda s: URN(s) if s else None), -} -encoder_functions: dict[Any, Callable[[Any], Any]] = { - datetime: (lambda d: int(d.timestamp() * 1000) if d else None), - URN: (lambda u: str(u) if u else None), -} - -for type_, translation_function in decoder_functions.items(): - dataclasses_json.cfg.global_config.decoders[type_] = translation_function - dataclasses_json.cfg.global_config.decoders[ - Optional[type_] # type: ignore - ] = translation_function - -for type_, translation_function in encoder_functions.items(): - dataclasses_json.cfg.global_config.encoders[type_] = translation_function - dataclasses_json.cfg.global_config.encoders[ - Optional[type_] # type: ignore - ] = translation_function - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class Artifact: - height: int = -1 - width: int = -1 - file_identifying_url_path_segment: str = "" - expires_at: Optional[datetime] = None - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class VectorImage: - artifacts: list[Artifact] = field(default_factory=list) - root_url: str = "" - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class Picture: - vector_image: Optional[VectorImage] = field( - metadata=config(field_name="com.linkedin.common.VectorImage"), - default=None, - ) - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class MiniProfile: - entity_urn: Optional[URN] = None - public_identifier: Optional[str] = None - first_name: Optional[str] = None - last_name: Optional[str] = None - occupation: Optional[str] = None - memorialized: bool = False - object_urn: Optional[URN] = None - 
picture: Optional[Picture] = None - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class MessagingMember: - entity_urn: Optional[URN] = None - mini_profile: Optional[MiniProfile] = None - alternate_name: Optional[str] = None - alternate_image: Optional[Picture] = None - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class Paging: - count: int = 0 - start: int = 0 - links: list[Any] = field(default_factory=list) - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class TextEntity: - urn: Optional[URN] = None - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class AttributeType: - text_entity: Optional[TextEntity] = field( - metadata=config(field_name="com.linkedin.pemberly.text.Entity"), default=None - ) - - -@dataclass_json -@dataclass -class Attribute: - start: int = 0 - length: int = 0 - type_: Optional[AttributeType] = field(metadata=config(field_name="type"), default=None) - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class AttributedBody: - text: str = "" - attributes: list[Attribute] = field(default_factory=list) - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class MessageAttachmentCreate: - byte_size: int = 0 - id_: Optional[URN] = field(metadata=config(field_name="id"), default=None) - media_type: str = "" - name: str = "" - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class MessageAttachmentReference: - string: str = "" - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class MessageAttachment: - id_: Optional[URN] = field(metadata=config(field_name="id"), default=None) - byte_size: int = 0 - media_type: str = "" - name: str = "" - reference: Optional[MessageAttachmentReference] = None - - 
-@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class AudioMetadata: - urn: Optional[URN] - duration: int = 0 - url: str = "" - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class MediaAttachment: - media_type: str = "" - audio_metadata: Optional[AudioMetadata] = None - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class GifInfo: - original_height: int = 0 - original_width: int = 0 - url: str = "" - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class ThirdPartyMediaInfo: - previewgif: Optional[GifInfo] = None - nanogif: Optional[GifInfo] = None - gif: Optional[GifInfo] = None - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class ThirdPartyMedia: - media_type: str = "" - id_: str = field(metadata=config(field_name="id"), default="") - media: Optional[ThirdPartyMediaInfo] = None - title: str = "" - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class LegalText: - static_legal_text: str = "" - custom_legal_text: str = "" - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class SpInmailStandardSubContent: - action: str = "" - action_text: str = "" - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class SpInmailSubContent: - standard: Optional[SpInmailStandardSubContent] = field( - metadata=config( - field_name="com.linkedin.voyager.messaging.event.message.spinmail.SpInmailStandardSubContent" # noqa: E501 - ), - default=None, - ) - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class SpInmailContent: - status: str = "" - sp_inmail_type: str = "" - advertiser_label: str = "" - body: str = "" - legal_text: Optional[LegalText] = None - sub_content: Optional[SpInmailSubContent] = None - 
- -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class ConversationNameUpdateContent: - new_name: str = "" - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class MessageCustomContent: - conversation_name_update_content: Optional[ConversationNameUpdateContent] = field( - metadata=config( - field_name="com.linkedin.voyager.messaging.event.message.ConversationNameUpdateContent" # noqa: E501 - ), - default=None, - ) - sp_inmail_content: Optional[SpInmailContent] = field( - metadata=config( - field_name="com.linkedin.voyager.messaging.event.message.spinmail.SpInmailContent" # noqa: E501 - ), - default=None, - ) - third_party_media: Optional[ThirdPartyMedia] = field( - metadata=config(field_name="com.linkedin.voyager.messaging.shared.ThirdPartyMedia"), - default=None, - ) - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class CommentaryText: - text: str = "" - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class Commentary: - text: Optional[CommentaryText] - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class NavigationContext: - tracking_action_type: str = "" - action_target: str = "" - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class ArticleComponent: - navigation_context: Optional[NavigationContext] = None - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class ImageAttributes: - vector_image: Optional[VectorImage] = None - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class Image: - attributes: list[ImageAttributes] = field(default_factory=list) - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class ImageComponent: - images: list[Image] = field(default_factory=list) - - 
-@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class Document: - transcribed_document_url: str = "" - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class DocumentComponent: - document: Optional[Document] = None - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class StreamLocations: - url: str = "" - expires_at: int = -1 - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class ProgressiveStreams: - width: int = -1 - height: int = -1 - size: int = -1 - media_type: str = "" - streaming_locations: list[StreamLocations] = field(default_factory=list) - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class VideoPlayMetadata: - progressive_streams: list[ProgressiveStreams] = field(default_factory=list) - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class VideoComponent: - video_play_metadata: Optional[VideoPlayMetadata] = None - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class ArticleContent: - image_component: Optional[ImageComponent] = field( - metadata=config(field_name="com.linkedin.voyager.feed.render.ImageComponent"), - default=None, - ) - video_component: Optional[VideoComponent] = field( - metadata=config(field_name="com.linkedin.voyager.feed.render.LinkedInVideoComponent"), - default=None, - ) - document_component: Optional[DocumentComponent] = field( - metadata=config(field_name="com.linkedin.voyager.feed.render.DocumentComponent"), - default=None, - ) - article_component: Optional[ArticleComponent] = field( - metadata=config(field_name="com.linkedin.voyager.feed.render.ArticleComponent"), - default=None, - ) - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class ActorName: - text: str = "" - - 
-@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class Actor: - name: Optional[ActorName] = None - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class FeedUpdate: - actor: Optional[Actor] = None - commentary: Optional[Commentary] = None - content: Optional[ArticleContent] = None - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class MessageEvent: - body: str = "" - feed_update: Optional[FeedUpdate] = None - message_body_render_format: str = "" - subject: Optional[str] = None - recalled_at: Optional[datetime] = None - last_edited_at: Optional[datetime] = None - attributed_body: Optional[AttributedBody] = None - attachments: list[MessageAttachment] = field(default_factory=list) - media_attachments: list[MediaAttachment] = field(default_factory=list) - custom_content: Optional[MessageCustomContent] = None - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class EventContent: - message_event: Optional[MessageEvent] = field( - metadata=config(field_name="com.linkedin.voyager.messaging.event.MessageEvent"), - default=None, - ) - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class From: - messaging_member: Optional[MessagingMember] = field( - metadata=config(field_name="com.linkedin.voyager.messaging.MessagingMember"), - default=None, - ) - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class ReactionSummary: - count: int = 0 - first_reacted_at: Optional[datetime] = None - emoji: str = "" - viewer_reacted: bool = False - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class ConversationEvent: - created_at: Optional[datetime] = None - entity_urn: Optional[URN] = None - event_content: Optional[EventContent] = None - subtype: str = "" - from_: Optional[From] = 
field(metadata=config(field_name="from"), default=None) - previous_event_in_conversation: Optional[URN] = None - reaction_summaries: list[ReactionSummary] = field(default_factory=list) - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class Participant: - messaging_member: Optional[MessagingMember] = field( - metadata=config(field_name="com.linkedin.voyager.messaging.MessagingMember"), - default=None, - ) - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class Conversation: - group_chat: bool = False - total_event_count: int = 0 - unread_count: int = 0 - read: Optional[bool] = None - last_activity_at: Optional[datetime] = None - entity_urn: Optional[URN] = None - name: str = "" - muted: bool = False - events: list[ConversationEvent] = field(default_factory=list) - participants: list[Participant] = field(default_factory=list) - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class ConversationsResponse(DataClassJsonMixin): - elements: list[Conversation] = field(default_factory=list) - paging: Optional[Paging] = None - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class ConversationResponse(DataClassJsonMixin): - elements: list[ConversationEvent] = field(default_factory=list) - paging: Optional[Paging] = None - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class MessageCreate(DataClassJsonMixin): - attributed_body: Optional[AttributedBody] = None - body: str = "" - attachments: list[MessageAttachmentCreate] = field(default_factory=list) - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class MessageCreatedInfo: - created_at: Optional[datetime] = None - event_urn: Optional[URN] = None - backend_event_urn: Optional[URN] = None - conversation_urn: Optional[URN] = None - backend_conversation_urn: Optional[URN] = 
None - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class SendMessageResponse(DataClassJsonMixin): - value: Optional[MessageCreatedInfo] = None - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class UserProfileResponse(DataClassJsonMixin): - plain_id: str = "" - mini_profile: Optional[MiniProfile] = None - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class SeenReceipt: - event_urn: URN - seen_at: Optional[datetime] = None - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class RealTimeEventStreamEvent(DataClassJsonMixin): - # Action real-time events (marking as read for example) - action: Optional[str] = None - conversation: Optional[Union[Conversation, URN]] = None - - # Message real-time events - previous_event_in_conversation: Optional[URN] = None - event: Optional[ConversationEvent] = None - - # Reaction real-time events - reaction_added: Optional[bool] = None - actor_mini_profile_urn: Optional[URN] = None - event_urn: Optional[URN] = None - reaction_summary: Optional[ReactionSummary] = None - - # Seen Receipt real-time events - from_entity: Optional[URN] = None - seen_receipt: Optional[SeenReceipt] = None - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class ReactorProfile: - first_name: str = "" - last_name: str = "" - entity_urn: Optional[URN] = None - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class Reactor: - reactor_urn: Optional[URN] = None - reactor: Optional[ReactorProfile] = None - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) -@dataclass -class ReactorsResponse(DataClassJsonMixin): - elements: list[Reactor] = field(default_factory=list) - paging: Optional[Paging] = None - - -@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 
-@dataclass -class Error(DataClassJsonMixin, Exception): - status: int = -1 diff --git a/linkedin_messaging/exceptions.py b/linkedin_messaging/exceptions.py deleted file mode 100644 index 951ce1f..0000000 --- a/linkedin_messaging/exceptions.py +++ /dev/null @@ -1,2 +0,0 @@ -class TooManyRequestsError(Exception): - pass diff --git a/linkedin_messaging/linkedin.py b/linkedin_messaging/linkedin.py deleted file mode 100644 index 3ee52df..0000000 --- a/linkedin_messaging/linkedin.py +++ /dev/null @@ -1,659 +0,0 @@ -from typing import Any, AsyncGenerator, Awaitable, Callable, Optional, TypeVar, Union, cast -from collections import defaultdict -from datetime import datetime -import asyncio -import json -import logging -import uuid - -from bs4 import BeautifulSoup -from dataclasses_json.api import DataClassJsonMixin -import aiohttp -import aiohttp.client_exceptions - -from .api_objects import ( - URN, - Conversation, - ConversationResponse, - ConversationsResponse, - Error, - MessageAttachmentCreate, - MessageCreate, - Picture, - ReactorsResponse, - RealTimeEventStreamEvent, - SendMessageResponse, - UserProfileResponse, -) -from .exceptions import TooManyRequestsError - -LINKEDIN_BASE_URL = "https://www.linkedin.com" -LOGIN_URL = f"{LINKEDIN_BASE_URL}/checkpoint/lg/login-submit" -LOGOUT_URL = f"{LINKEDIN_BASE_URL}/uas/logout" -REALTIME_CONNECT_URL = f"{LINKEDIN_BASE_URL}/realtime/connect" -VERIFY_URL = f"{LINKEDIN_BASE_URL}/checkpoint/challenge/verify" -API_BASE_URL = f"{LINKEDIN_BASE_URL}/voyager/api" -CONNECTIVITY_TRACKING_URL = ( - f"{LINKEDIN_BASE_URL}/realtime/realtimeFrontendClientConnectivityTracking" -) - -SEED_URL = f"{LINKEDIN_BASE_URL}/login" -""" -URL to seed all of the auth requests -""" - -T = TypeVar("T", bound=DataClassJsonMixin) - - -async def try_from_json(deserialise_to: T, response: aiohttp.ClientResponse) -> T: - if response.status < 200 or 300 <= response.status: - try: - error = Error.from_json(await response.text()) - except Exception: - raise 
Exception( - f"Deserialising to {deserialise_to} failed because response " - f"was {response.status}. Details: {await response.text()}" - ) - raise error - - text = await response.text() - try: - return deserialise_to.from_json(text) - except (json.JSONDecodeError, ValueError) as e: - try: - error = Error.from_json(text) - except Exception: - raise Exception( - f"Deserialising to {deserialise_to} failed. Error: {e}. " f"Response: {text}." - ) - raise error - - -class ChallengeException(Exception): - pass - - -fallback_headers = { - "user-agent": " ".join( - [ - "Mozilla/5.0 (X11; Linux x86_64)", - "AppleWebKit/537.36 (KHTML, like Gecko)", - "Chrome/120.0.0.0 Safari/537.36", - ] - ), - "accept-language": "en-US,en;q=0.9", - "x-li-lang": "en_US", - "x-restli-protocol-version": "2.0.0", - "x-li-track": json.dumps( - { - "clientVersion": "1.13.8751", - "mpVersion": "1.13.8751", - "osName": "web", - "timezoneOffset": -7, - "timezone": "America/Denver", - "deviceFormFactor": "DESKTOP", - "mpName": "voyager-web", - "displayDensity": 1, - "displayWidth": 2560, - "displayHeight": 1440, - } - ), - "Authority": "www.linkedin.com", - "referer": "https://www.linkedin.com/feed/", - "sec-ch-ua": '"Not_A Brand";v="8", "Chromium";v="120"', - "sec-ch-ua-mobile": "?0", - "sec-ch-ua-platform": '"Linux"', - "sec-fetch-dest": "empty", - "sec-fetch-mode": "cors", - "sec-fetch-site": "same-origin", - "x-li-page-instance": "urn:li:page:feed_index_index;bcfe9fd6-239a-49e9-af15-44b7e5895eaa", - "x-li-recipe-accept": "application/vnd.linkedin.normalized+json+2.1", - "x-li-recipe-map": json.dumps({ - "inAppAlertsTopic": "com.linkedin.voyager.dash.deco.identity.notifications.InAppAlert-51", - "professionalEventsTopic": "com.linkedin.voyager.dash.deco.events.ProfessionalEventDetailPage-53", # noqa: E501 - "topCardLiveVideoTopic": "com.linkedin.voyager.dash.deco.video.TopCardLiveVideo-9", - }), -} - - -class LinkedInMessaging: - session: aiohttp.ClientSession - two_factor_payload: dict[str, Any] 
- event_listeners: defaultdict[ - str, - list[ - Union[ - Callable[[RealTimeEventStreamEvent], Awaitable[None]], - Callable[[Exception], Awaitable[None]], - ] - ], - ] - headers: dict[str, str] - - using_headers_from_user = False - - _realtime_session_id: uuid.UUID - _realtime_connection_id: Optional[uuid.UUID] = None - - def __init__(self): - self._heartbeat_task = None - self.session = aiohttp.ClientSession() - self.event_listeners = defaultdict(list) - - def update_headers_from_cookies(self): - self.headers["csrf-token"] = self.cookies()["JSESSIONID"].strip('"') - - @staticmethod - def from_cookies_and_headers(cookies: dict[str, str], - headers: Optional[dict[str, str]]) -> "LinkedInMessaging": - linkedin = LinkedInMessaging() - linkedin.session.cookie_jar.update_cookies(cookies) - - if headers: - linkedin.headers = headers - linkedin.using_headers_from_user = True - else: - linkedin.headers = fallback_headers - linkedin.update_headers_from_cookies() - - # Skip these headers, including them will result in events being received in a newer - # format that we don't support - linkedin.headers.pop("x-li-query-map", None) - linkedin.headers.pop("x-li-query-accept", None) - linkedin.headers.pop("x-li-accept", None) - - return linkedin - - def cookies(self) -> dict[str, str]: - return {c.key: c.value for c in self.session.cookie_jar} - - async def close(self): - await self.session.close() - - async def _get(self, relative_url: str, **kwargs: Any) -> aiohttp.ClientResponse: - headers = kwargs.pop("headers", {}) - headers.update(self.headers) - return await self.session.get( - API_BASE_URL + relative_url, - headers=headers, - **kwargs - ) - - async def _post(self, relative_url: str, **kwargs: Any) -> aiohttp.ClientResponse: - headers = kwargs.pop("headers", {}) - headers.update(self.headers) - return await self.session.post( - API_BASE_URL + relative_url, - headers=headers, - **kwargs - ) - - # region Authentication - - @property - def has_auth_cookies(self) -> bool: - 
cookie_names = {c.key for c in self.session.cookie_jar} - return "li_at" in cookie_names and "JSESSIONID" in cookie_names - - async def logged_in(self) -> bool: - if not self.has_auth_cookies: - return False - try: - return bool(await self.get_user_profile()) - except Exception as e: - logging.exception(f"Failed getting the user profile: {e}") - return False - - async def login(self, email: str, password: str, new_session: bool = True): - if new_session: - if self.session: - await self.session.close() - self.session = aiohttp.ClientSession() - - # Get the CSRF token. - async with self.session.get(SEED_URL) as seed_response: - if seed_response.status != 200: - raise Exception("Couldn't open the CSRF seed page") - - soup = BeautifulSoup(await seed_response.text(), "html.parser") - login_csrf_param = soup.find("input", {"name": "loginCsrfParam"})["value"] - - # Login with username and password - async with self.session.post( - LOGIN_URL, - data={ - "loginCsrfParam": login_csrf_param, - "session_key": email, - "session_password": password, - }, - ) as login_response: - # Check to see if the user was successfully logged in with just email and - # password. - if self.has_auth_cookies: - self.update_headers_from_cookies() - return - - # 2FA is required. Throw an exception. - soup = BeautifulSoup(await login_response.text(), "html.parser") - - # TODO (#1) better detection of 2FA vs bad password - if soup.find("input", {"name": "challengeId"}): - self.two_factor_payload = { - k: soup.find("input", {"name": k})["value"] - for k in ( - "csrfToken", - "pageInstance", - "resendUrl", - "challengeId", - "displayTime", - "challengeSource", - "requestSubmissionId", - "challengeType", - "challengeData", - "challengeDetails", - "failureRedirectUri", - "flowTreeId", - ) - } - self.two_factor_payload["language"] = "en-US" - self.two_factor_payload["recognizedDevice"] = "on" - raise ChallengeException() - - # TODO (#1) can we scrape anything from the page? 
- raise Exception("Failed to log in.") - - async def enter_2fa(self, two_factor_code: str): - async with self.session.post( - VERIFY_URL, data={**self.two_factor_payload, "pin": two_factor_code} - ): - if self.has_auth_cookies: - self.update_headers_from_cookies() - return - # TODO (#1) can we scrape anything from the page? - raise Exception("Failed to log in.") - - async def logout(self) -> bool: - csrf_token = self.headers.get("csrf-token") - if not csrf_token: - return True - response = await self.session.get( - LOGOUT_URL, - params={"csrfToken": csrf_token}, - allow_redirects=False, - ) - return response.status == 303 - - # endregion - - # region Conversations - - async def get_conversations( - self, - last_activity_before: Optional[datetime] = None, - ) -> ConversationsResponse: - """ - Fetch list of conversations the user is in. - - :param last_activity_before: :class:`datetime` of the last chat activity to - consider - """ - if last_activity_before is None: - last_activity_before = datetime.now() - - params = { - "keyVersion": "LEGACY_INBOX", - # For some reason, createdBefore is the key, even though that makes - # absolutely no sense whatsoever. - "createdBefore": int(last_activity_before.timestamp() * 1000), - } - - res = await self._get("/messaging/conversations", params=params) - return cast(ConversationsResponse, await try_from_json(ConversationsResponse, res)) - - async def get_all_conversations(self) -> AsyncGenerator[Conversation, None]: - """ - A generator of all of the user's conversations using paging. - """ - last_activity_before = datetime.now() - while True: - conversations_response = await self.get_conversations( - last_activity_before=last_activity_before - ) - for c in conversations_response.elements: - yield c - - # The page size is 20, by default, so if we get less than 20, we are at the - # end of the list so we should stop. 
- if len(conversations_response.elements) < 20: - break - - if last_activity_at := conversations_response.elements[-1].last_activity_at: - last_activity_before = last_activity_at - else: - break - - async def get_conversation( - self, - conversation_urn: URN, - created_before: Optional[datetime] = None, - ) -> ConversationResponse: - """ - Fetch the given conversation. - - :param conversation_urn_id: LinkedIn URN for a conversation - :param created_before: datetime of the last chat activity to consider - """ - if len(conversation_urn.id_parts) != 1: - raise TypeError(f"Invalid conversation URN {conversation_urn}.") - - if created_before is None: - created_before = datetime.now() - - params = { - "createdBefore": int(created_before.timestamp() * 1000), - } - - res = await self._get( - f"/messaging/conversations/{conversation_urn.id_parts[0]}/events", - params=params, - ) - return cast(ConversationResponse, await try_from_json(ConversationResponse, res)) - - async def mark_conversation_as_read(self, conversation_urn: URN) -> bool: - res = await self._post( - f"/messaging/conversations/{conversation_urn.id_parts[-1]}", - json={"patch": {"$set": {"read": True}}}, - ) - return res.status == 200 - - # endregion - - # region Messages - - async def upload_media( - self, - data: bytes, - filename: str, - media_type: str, - ) -> MessageAttachmentCreate: - upload_metadata_response = await self._post( - "/voyagerMediaUploadMetadata", - params={"action": "upload"}, - json={ - "mediaUploadType": "MESSAGING_PHOTO_ATTACHMENT", - "fileSize": len(data), - "filename": filename, - }, - ) - if upload_metadata_response.status != 200: - raise Exception("Failed to send upload metadata.") - - upload_metadata_response_json = (await upload_metadata_response.json()).get("value", {}) - upload_url = upload_metadata_response_json.get("singleUploadUrl") - if not upload_url: - raise Exception("No upload URL provided") - - upload_response = await self.session.put(upload_url, data=data) - if 
upload_response.status != 201: - # TODO (#2) is there any other data that we get? - raise Exception("Failed to upload file.") - - return MessageAttachmentCreate( - len(data), - URN(upload_metadata_response_json.get("urn")), - media_type, - filename, - ) - - async def send_message( - self, - conversation_urn_or_recipients: Union[URN, list[URN]], - message_create: MessageCreate, - ) -> SendMessageResponse: - params = {"action": "create"} - message_create_key = "com.linkedin.voyager.messaging.create.MessageCreate" - - message_event: dict[str, Any] = { - "eventCreate": {"value": {message_create_key: message_create.to_dict()}} - } - - if isinstance(conversation_urn_or_recipients, list): - message_event["recipients"] = [r.get_id() for r in conversation_urn_or_recipients] - message_event["subtype"] = "MEMBER_TO_MEMBER" - payload = { - "keyVersion": "LEGACY_INBOX", - "conversationCreate": message_event, - } - res = await self._post( - "/messaging/conversations", - params=params, - json=payload, - ) - else: - conversation_id = conversation_urn_or_recipients.get_id() - res = await self._post( - f"/messaging/conversations/{conversation_id}/events", - params=params, - json=message_event, - ) - - return cast(SendMessageResponse, await try_from_json(SendMessageResponse, res)) - - async def delete_message(self, conversation_urn: URN, message_urn: URN) -> bool: - res = await self._post( - "/messaging/conversations/{}/events/{}".format( - conversation_urn, message_urn.id_parts[-1] - ), - params={"action": "recall"}, - ) - return res.status == 204 - - async def download_linkedin_media(self, url: str) -> bytes: - async with self.session.get(url) as media_resp: - if not media_resp.ok: - raise Exception(f"Failed downloading media. 
Response code {media_resp.status}") - return await media_resp.content.read() - - # endregion - - # region Reactions - - async def add_emoji_reaction( - self, - conversation_urn: URN, - message_urn: URN, - emoji: str, - ) -> bool: - res = await self._post( - "/messaging/conversations/{}/events/{}".format( - conversation_urn, message_urn.id_parts[-1] - ), - params={"action": "reactWithEmoji"}, - json={"emoji": emoji}, - ) - return res.status == 204 - - async def remove_emoji_reaction( - self, - conversation_urn: URN, - message_urn: URN, - emoji: str, - ) -> bool: - res = await self._post( - "/messaging/conversations/{}/events/{}".format( - conversation_urn, message_urn.id_parts[-1] - ), - params={"action": "unreactWithEmoji"}, - json={"emoji": emoji}, - ) - return res.status == 204 - - async def get_reactors(self, message_urn: URN, emoji: str) -> ReactorsResponse: - params = { - "decorationId": "com.linkedin.voyager.dash.deco.messaging.FullReactor-8", - "emoji": emoji, - "messageUrn": f"urn:li:fsd_message:{message_urn.id_parts[-1]}", - "q": "messageAndEmoji", - } - res = await self._get("/voyagerMessagingDashReactors", params=params) - return cast(ReactorsResponse, await try_from_json(ReactorsResponse, res)) - - # endregion - - # region Typing Notifications - - async def set_typing(self, conversation_urn: URN): - await self._post( - "/messaging/conversations", - params={"action": "typing"}, - json={"conversationId": conversation_urn.get_id()}, - ) - - # endregion - - # region Profiles - - async def get_user_profile(self) -> UserProfileResponse: - res = await self._get("/me") - return cast(UserProfileResponse, await try_from_json(UserProfileResponse, res)) - - async def download_profile_picture(self, picture: Picture) -> bytes: - if not picture.vector_image: - raise Exception( - "Failed downloading media. Invalid Picture object with no vector_image." 
- ) - url = ( - picture.vector_image.root_url - + picture.vector_image.artifacts[-1].file_identifying_url_path_segment - ) - async with await self.session.get(url) as profile_resp: - if not profile_resp.ok: - raise Exception(f"Failed downloading media. Response code {profile_resp.status}") - return await profile_resp.content.read() - - # endregion - - # region Event Listener - - def add_event_listener( - self, - payload_key: str, - fn: Union[ - Callable[[RealTimeEventStreamEvent], Awaitable[None]], - Callable[[Exception], Awaitable[None]], - ], - ): - """ - There is one special event type: - - * ``ALL_EVENTS`` - an event fired on every event, and which contains the entirety of the - raw event payload - """ - self.event_listeners[payload_key].append(fn) - - async def _fire(self, payload_key: str, event: Any): - for listener in self.event_listeners[payload_key]: - try: - await listener(event) - except Exception: - logging.exception(f"Listener {listener} failed to handle {event}") - - async def _listen_to_event_stream(self): - logging.info("Starting event stream listener") - - headers = { - "accept": "text/event-stream", - **self.headers, - } - - async with self.session.get( - REALTIME_CONNECT_URL, - headers=headers, - params={"rc": "1"}, - timeout=aiohttp.ClientTimeout(total=None), - ) as resp: - if resp.status != 200: - raise TooManyRequestsError(f"Failed to connect. Status {resp.status}.") - - while True: - line = await asyncio.wait_for(resp.content.readline(), timeout=20) - if resp.content.at_eof(): - break - - if not line.startswith(b"data:"): - continue - data = json.loads(line.decode("utf-8")[6:]) - - logging.debug(f"Got data from event stream {data.keys()}") - - # Special handling for ALL_EVENTS handler. 
- if all_events_handlers := self.event_listeners.get("ALL_EVENTS"): - for handler in all_events_handlers: - try: - await handler(data) - except Exception: - logging.exception(f"Handler {handler} failed to handle {data}") - - if cc := data.get("com.linkedin.realtimefrontend.ClientConnection", {}): - logging.info(f"Got realtime connection ID: {cc.get('id')}") - self._realtime_connection_id = uuid.UUID(cc.get("id")) - - event_payload = data.get("com.linkedin.realtimefrontend.DecoratedEvent", {}).get( - "payload", {} - ) - - if event_payload: - logging.debug(f"Firing events for keys {event_payload.keys()}") - - for key in self.event_listeners.keys(): - if event_payload.get(key) is not None: - await self._fire(key, - RealTimeEventStreamEvent.from_dict(event_payload)) - - logging.info("Event stream closed") - - async def _send_heartbeat(self, user_urn: URN): - is_first = True - mp_version = json.loads(self.headers["x-li-track"])["mpVersion"] - - logging.info(f"Starting heartbeat task with client version {mp_version}") - - while True: - await asyncio.sleep(60) - logging.info("Sending heartbeat") - - await self._post( - CONNECTIVITY_TRACKING_URL, - params={"action": "sendHeartbeat"}, - json={ - "isFirstHeartbeat": not is_first, - "isLastHeartbeat": False, - "realtimeSessionId": str(self._realtime_session_id), - "mpName": "voyager-web", - "mpVersion": mp_version, - "clientId": "voyager-web", - "actorUrn": str(user_urn), - "contextUrns": [str(user_urn)], - }, - ) - - is_first = False - - async def start_listener(self, user_urn: URN): - self._realtime_session_id = uuid.uuid4() - logging.info(f"Created realtime session ID: {self._realtime_session_id}") - while True: - try: - self._heartbeat_task = asyncio.create_task(self._send_heartbeat(user_urn)) - await self._listen_to_event_stream() - except asyncio.TimeoutError as te: - logging.exception(f"Timeout in listener: {te}") - raise - except Exception as e: - logging.exception(f"Got exception in listener: {e}") - raise - finally: 
- if not self._heartbeat_task.done(): - self._heartbeat_task.cancel() - - # endregion diff --git a/linkedin_messaging/py.typed b/linkedin_messaging/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/linkedin_messaging/test_urn.py b/linkedin_messaging/test_urn.py deleted file mode 100644 index 7761236..0000000 --- a/linkedin_messaging/test_urn.py +++ /dev/null @@ -1,13 +0,0 @@ -from .api_objects import URN - - -def test_urn_equivalence(): - assert URN("urn:123") == URN("123") - assert URN("urn:(123,456)") == URN("urn:test:(123,456)") - - -def test_urn_equivalence_in_tuple(): - assert (URN("urn:123"), URN("urn:(123,456)")) == ( - URN("123"), - URN("urn:test:(123,456)"), - ) diff --git a/optional-requirements.txt b/optional-requirements.txt deleted file mode 100644 index 5b9ecd5..0000000 --- a/optional-requirements.txt +++ /dev/null @@ -1,22 +0,0 @@ -# Format: #/name defines a new extras_require group called name -# Uncommented lines after the group definition insert things into that group. 
- -#/animated_stickers -pillow>=10.0.1,<11 - -#/e2be -python-olm>=3,<4 -unpaddedbase64>=1,<3 - -#/metrics -prometheus_client>=0.6,<0.20 - -#/proxy -pysocks -aiohttp-socks - -#/weblogin -setuptools - -#/sqlite -aiosqlite>=0.16,<0.20 diff --git a/pkg/connector/backfill.go b/pkg/connector/backfill.go new file mode 100644 index 0000000..e5e2e77 --- /dev/null +++ b/pkg/connector/backfill.go @@ -0,0 +1,61 @@ +package connector + +import ( + "context" + "sort" + "strconv" + + "maunium.net/go/mautrix/bridgev2" + "maunium.net/go/mautrix/bridgev2/networkid" + + "github.com/beeper/linkedin/pkg/linkedingo/routing/query" +) + +var _ bridgev2.BackfillingNetworkAPI = (*LinkedInClient)(nil) + +func (lc *LinkedInClient) FetchMessages(ctx context.Context, params bridgev2.FetchMessagesParams) (*bridgev2.FetchMessagesResponse, error) { + conversationUrn := string(params.Portal.PortalKey.ID) + + variables := query.FetchMessagesVariables{ + ConversationUrn: conversationUrn, + CountBefore: int64(params.Count), + } + + if params.Cursor == "" { + variables.DeliveredAt = params.AnchorMessage.Timestamp.UnixMilli() + } else { + cursorInt, err := strconv.Atoi(string(params.Cursor)) + if err != nil { + return nil, err + } + variables.DeliveredAt = int64(cursorInt) + } + + fetchMessages, err := lc.client.FetchMessages(variables) + if err != nil { + return nil, err + } + + messages := fetchMessages.Messages + sort.Slice(messages, func(j, i int) bool { + return messages[j].DeliveredAt < messages[i].DeliveredAt + }) + + if err != nil { + return nil, err + } + + backfilledMessages, err := lc.MessagesToBackfillMessages(ctx, messages, params.Portal) // get convo by id property missing + if err != nil { + return nil, err + } + + fetchMessagesResp := &bridgev2.FetchMessagesResponse{ + Messages: backfilledMessages, + Cursor: networkid.PaginationCursor(messages[0].DeliveredAt), + HasMore: len(messages) >= params.Count, + Forward: params.Forward, + } + + return fetchMessagesResp, nil +} diff --git 
a/pkg/connector/client.go b/pkg/connector/client.go new file mode 100644 index 0000000..187ca21 --- /dev/null +++ b/pkg/connector/client.go @@ -0,0 +1,201 @@ +package connector + +import ( + "context" + "fmt" + "time" + + "github.com/rs/zerolog" + "maunium.net/go/mautrix/bridge/status" + "maunium.net/go/mautrix/bridgev2" + "maunium.net/go/mautrix/bridgev2/database" + "maunium.net/go/mautrix/bridgev2/networkid" + bridgeEvt "maunium.net/go/mautrix/event" + + "github.com/beeper/linkedin/pkg/linkedingo" + "github.com/beeper/linkedin/pkg/linkedingo/cookies" + "github.com/beeper/linkedin/pkg/linkedingo/routing/response" + "github.com/beeper/linkedin/pkg/linkedingo/types" +) + +type LinkedInClient struct { + connector *LinkedInConnector + client *linkedingo.Client + + userLogin *bridgev2.UserLogin + + userCache map[string]types.Member // look into +} + +var ( + _ bridgev2.NetworkAPI = (*LinkedInClient)(nil) +) + +func NewLinkedInClient(ctx context.Context, tc *LinkedInConnector, login *bridgev2.UserLogin) *LinkedInClient { + log := zerolog.Ctx(ctx).With(). + Str("component", "twitter_client"). + Str("user_login_id", string(login.ID)). 
+ Logger() + + meta := login.Metadata.(*UserLoginMetadata) + clientOpts := &linkedingo.ClientOpts{ + Cookies: cookies.NewCookiesFromString(meta.Cookies), + } + twitClient := &LinkedInClient{ + client: linkedingo.NewClient(clientOpts, log), + userLogin: login, + userCache: make(map[string]types.Member), // todo change any + } + + //twitClient.client.SetEventHandler(twitClient.HandleTwitterEvent) // todo set event listener + twitClient.connector = tc + return twitClient +} + +func (lc *LinkedInClient) Connect(ctx context.Context) error { + if lc.client == nil { + lc.userLogin.BridgeState.Send(status.BridgeState{ + StateEvent: status.StateBadCredentials, + Error: "linkedin-not-logged-in", + }) + return nil + } + + err := lc.client.LoadMessagesPage() + if err != nil { + return fmt.Errorf("failed to load messages page") + } + + profile, err := lc.client.GetCurrentUserProfile() + + lc.userLogin.RemoteName = fmt.Sprintf("%s %s", profile.MiniProfile.FirstName, profile.MiniProfile.LastName) + lc.userLogin.Save(ctx) + + err = lc.client.Connect() + if err != nil { + return fmt.Errorf("failed to connect to linkedin client: %w", err) + } + lc.userLogin.BridgeState.Send(status.BridgeState{StateEvent: status.StateConnected}) + return nil +} + +func (lc *LinkedInClient) Disconnect() { + err := lc.client.Disconnect() + if err != nil { + lc.userLogin.Log.Error().Err(err).Msg("failed to disconnect, err:") + } +} + +func (lc *LinkedInClient) IsLoggedIn() bool { + return ValidCookieRegex.MatchString(lc.userLogin.Metadata.(UserLoginMetadata).Cookies) +} + +func (lc *LinkedInClient) LogoutRemote(ctx context.Context) { + log := zerolog.Ctx(ctx) + err := lc.client.Logout() + if err != nil { + log.Error().Err(err).Msg("error logging out") + } +} + +func (lc *LinkedInClient) IsThisUser(_ context.Context, userID networkid.UserID) bool { + return networkid.UserID(lc.client.GetCurrentUserID()) == userID +} + +func (lc *LinkedInClient) GetCurrentUser() (user *types.UserLoginProfile, err error) { 
+ user, err = lc.client.GetCurrentUserProfile() + return +} + +func (lc *LinkedInClient) GetChatInfo(_ context.Context, portal *bridgev2.Portal) (*bridgev2.ChatInfo, error) { + // not supported + return nil, nil +} + +func (lc *LinkedInClient) GetUserInfo(_ context.Context, ghost *bridgev2.Ghost) (*bridgev2.UserInfo, error) { + userInfo := lc.GetUserInfoBridge(string(ghost.ID)) + if userInfo == nil { + return nil, fmt.Errorf("failed to find user info in cache by id: %s", ghost.ID) + } + return userInfo, nil +} + +func (lc *LinkedInClient) GetCapabilities(_ context.Context, _ *bridgev2.Portal) *bridgev2.NetworkRoomCapabilities { + return &bridgev2.NetworkRoomCapabilities{ // todo update + FormattedText: false, + UserMentions: true, + RoomMentions: false, + + Edits: true, + EditMaxCount: 10, + EditMaxAge: 15 * time.Minute, + Captions: true, + Replies: true, + Reactions: true, + ReactionCount: 1, + } +} + +func (lc *LinkedInClient) convertEditToMatrix(ctx context.Context, portal *bridgev2.Portal, intent bridgev2.MatrixAPI, existing []*database.Message, data *response.MessageElement) (*bridgev2.ConvertedEdit, error) { + converted, err := lc.convertToMatrix(ctx, portal, intent, data) + if err != nil { + return nil, err + } + return &bridgev2.ConvertedEdit{ + ModifiedParts: []*bridgev2.ConvertedEditPart{converted.Parts[0].ToEditPart(existing[0])}, + }, nil +} + +func (lc *LinkedInClient) convertToMatrix(ctx context.Context, portal *bridgev2.Portal, intent bridgev2.MatrixAPI, msg *response.MessageElement) (*bridgev2.ConvertedMessage, error) { + var replyTo *networkid.MessageOptionalPartID + parts := make([]*bridgev2.ConvertedMessagePart, 0) + + for _, renderContent := range msg.RenderContent { + if renderContent.RepliedMessageContent.OriginalMessageUrn != "" { + replyTo = &networkid.MessageOptionalPartID{ + MessageID: networkid.MessageID(renderContent.RepliedMessageContent.OriginalMessageUrn), + } + } else { + convertedPart, err := lc.LinkedInAttachmentToMatrix(ctx, 
portal, intent, renderContent) + if err != nil { + return nil, err + } + if convertedPart != nil { + parts = append(parts, convertedPart) + } + } + } + + textPart := &bridgev2.ConvertedMessagePart{ + ID: "", + Type: bridgeEvt.EventMessage, + Content: &bridgeEvt.MessageEventContent{ + MsgType: bridgeEvt.MsgText, + Body: msg.Body.Text, + }, + } + + if len(textPart.Content.Body) > 0 { + parts = append(parts, textPart) + } + + cm := &bridgev2.ConvertedMessage{ + ReplyTo: replyTo, + Parts: parts, + } + + cm.MergeCaption() // merges captions and media onto one part + + return cm, nil +} + +func (lc *LinkedInClient) MakePortalKey(thread response.ThreadElement) networkid.PortalKey { + var receiver networkid.UserLoginID + if !thread.GroupChat { + receiver = lc.userLogin.ID + } + return networkid.PortalKey{ + ID: networkid.PortalID(thread.EntityUrn), + Receiver: receiver, + } +} diff --git a/pkg/connector/client_sync.go b/pkg/connector/client_sync.go new file mode 100644 index 0000000..e8c398a --- /dev/null +++ b/pkg/connector/client_sync.go @@ -0,0 +1,85 @@ +package connector + +import ( + "context" + "fmt" + "sort" + "time" + + "github.com/rs/zerolog" + "maunium.net/go/mautrix/bridgev2" + "maunium.net/go/mautrix/bridgev2/simplevent" + + "github.com/beeper/linkedin/pkg/linkedingo/routing/query" +) + +func (lc *LinkedInClient) syncChannels(ctx context.Context) { + log := zerolog.Ctx(ctx) + + getThreadsVariables := query.GetThreadsVariables{ + LastUpdatedBefore: 0, + NextCursor: "", + } + conversations, err := lc.client.GetThreads(query.GetThreadsVariables{}) + if err != nil { + log.Error().Err(err).Msg("failed to fetch initial inbox state:") + return + } + + threads := conversations.Threads + getThreadsVariables.LastUpdatedBefore = threads[len(threads)-1].LastActivityAt + getThreadsVariables.NextCursor = conversations.Metadata.NextCursor + hasMore := true + + // loop until no more threads can be found + for hasMore == true { + moreConversations, err := 
lc.client.GetThreads(getThreadsVariables) + if err != nil { + log.Error().Err(err).Msg(fmt.Sprintf("failed to fetch threads in trusted inbox using cursor %v,%s:", getThreadsVariables.LastUpdatedBefore, getThreadsVariables.NextCursor)) + return + } + + hasMore = len(moreConversations.Threads) > 0 + + if !hasMore { + continue + } + + threads = append(threads, moreConversations.Threads...) + + getThreadsVariables.NextCursor = moreConversations.Metadata.NextCursor + getThreadsVariables.LastUpdatedBefore = threads[len(threads)-1].LastActivityAt + } + + for _, thread := range threads { + messages := thread.MessageElements.Messages + sort.Slice(messages, func(j, i int) bool { + return messages[j].DeliveredAt < messages[i].DeliveredAt + }) + + latestMessage := messages[len(messages)-1] + latestMessageTS := time.UnixMilli(latestMessage.DeliveredAt) + + for _, participant := range thread.ConversationParticipants { + if member, _ := lc.userCache[participant.HostIdentityUrn]; member.Type != "" { + continue + } + lc.userCache[participant.HostIdentityUrn] = participant.ParticipantType.Member + } + + evt := &simplevent.ChatResync{ + EventMeta: simplevent.EventMeta{ + Type: bridgev2.RemoteEventChatResync, + LogContext: func(c zerolog.Context) zerolog.Context { + return c. 
+ Str("portal_key", thread.EntityUrn) + }, + PortalKey: lc.MakePortalKey(thread), + CreatePortal: true, + }, + ChatInfo: lc.ConversationToChatInfo(&thread), + LatestMessageTS: latestMessageTS, + } + lc.connector.br.QueueRemoteEvent(lc.userLogin, evt) + } +} diff --git a/pkg/connector/config.go b/pkg/connector/config.go new file mode 100644 index 0000000..779e321 --- /dev/null +++ b/pkg/connector/config.go @@ -0,0 +1,61 @@ +package connector + +import ( + _ "embed" + "strings" + "text/template" + + up "go.mau.fi/util/configupgrade" + "gopkg.in/yaml.v3" +) + +//go:embed example-config.yaml +var ExampleConfig string + +type Config struct { + DisplaynameTemplate string `yaml:"displayname_template"` + displaynameTemplate *template.Template `yaml:"-"` +} + +type umConfig Config + +func (c *Config) UnmarshalYAML(node *yaml.Node) error { + err := node.Decode((*umConfig)(c)) + if err != nil { + return err + } + + c.displaynameTemplate, err = template.New("displayname").Parse(c.DisplaynameTemplate) + if err != nil { + return err + } + return nil +} + +func upgradeConfig(helper up.Helper) { + helper.Copy(up.Str, "displayname_template") +} + +type DisplaynameParams struct { + FirstName string + LastName string +} + +func (c *Config) FormatDisplayname(firstName string, lastName string) string { + var nameBuf strings.Builder + err := c.displaynameTemplate.Execute(&nameBuf, &DisplaynameParams{ + FirstName: firstName, + LastName: lastName, + }) + if err != nil { + panic(err) + } + return nameBuf.String() +} + +func (lc *LinkedInConnector) GetConfig() (string, any, up.Upgrader) { + return ExampleConfig, &lc.Config, &up.StructUpgrader{ + SimpleUpgrader: up.SimpleUpgrader(upgradeConfig), + Base: ExampleConfig, + } +} diff --git a/pkg/connector/connector.go b/pkg/connector/connector.go new file mode 100644 index 0000000..4254805 --- /dev/null +++ b/pkg/connector/connector.go @@ -0,0 +1,62 @@ +// mautrix-twitter - A Matrix-Twitter puppeting bridge. 
+// Copyright (C) 2024 Tulir Asokan +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. +// +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see . + +package connector + +import ( + "context" + + "maunium.net/go/mautrix/bridgev2" +) + +type LinkedInConnector struct { + br *bridgev2.Bridge + + Config Config +} + +var _ bridgev2.NetworkConnector = (*LinkedInConnector)(nil) + +func (lc *LinkedInConnector) Init(bridge *bridgev2.Bridge) { + lc.br = bridge +} + +func (lc *LinkedInConnector) Start(_ context.Context) error { + return nil +} + +func (lc *LinkedInConnector) GetName() bridgev2.BridgeName { + return bridgev2.BridgeName{ + DisplayName: "LinkedIn", + NetworkURL: "https://twitter.com", + NetworkIcon: "mxc://nevarro.space/cwsWnmeMpWSMZLUNblJHaIvP", + NetworkID: "linkedin", + BeeperBridgeType: "linkedin", + DefaultPort: 29327, + } +} + +func (lc *LinkedInConnector) GetCapabilities() *bridgev2.NetworkGeneralCapabilities { + return &bridgev2.NetworkGeneralCapabilities{} +} + +func (lc *LinkedInConnector) LoadUserLogin(ctx context.Context, login *bridgev2.UserLogin) error { + twitClient := NewLinkedInClient(ctx, lc, login) + + login.Client = twitClient + + return nil +} diff --git a/pkg/connector/dbmeta.go b/pkg/connector/dbmeta.go new file mode 100644 index 0000000..adfb9e4 --- /dev/null +++ b/pkg/connector/dbmeta.go @@ -0,0 +1,21 @@ +package connector + +import ( + "maunium.net/go/mautrix/bridgev2/database" +) + +func (lc 
*LinkedInConnector) GetDBMetaTypes() database.MetaTypes { + return database.MetaTypes{ + Reaction: nil, + Portal: nil, + Message: nil, + Ghost: nil, + UserLogin: func() any { + return &UserLoginMetadata{} + }, + } +} + +type UserLoginMetadata struct { + Cookies string `json:"cookies"` +} diff --git a/pkg/connector/example-config.yaml b/pkg/connector/example-config.yaml new file mode 100644 index 0000000..4a9ae49 --- /dev/null +++ b/pkg/connector/example-config.yaml @@ -0,0 +1,4 @@ +# Displayname template for LinkedIn users. +# .FirstName is replaced with the first name +# .LastName is replaced with the first name +displayname_template: "{{ .FirstName }} {{ .LastName }} (LinkedIn)" diff --git a/pkg/connector/handlematrix.go b/pkg/connector/handlematrix.go new file mode 100644 index 0000000..223eacc --- /dev/null +++ b/pkg/connector/handlematrix.go @@ -0,0 +1,150 @@ +package connector + +import ( + "context" + "fmt" + "time" + + "maunium.net/go/mautrix/bridgev2" + "maunium.net/go/mautrix/bridgev2/database" + "maunium.net/go/mautrix/bridgev2/networkid" + "maunium.net/go/mautrix/event" + + "github.com/beeper/linkedin/pkg/linkedingo/routing/payload" + "github.com/beeper/linkedin/pkg/linkedingo/types" +) + +var ( + _ bridgev2.ReactionHandlingNetworkAPI = (*LinkedInClient)(nil) + _ bridgev2.ReadReceiptHandlingNetworkAPI = (*LinkedInClient)(nil) + _ bridgev2.EditHandlingNetworkAPI = (*LinkedInClient)(nil) + _ bridgev2.TypingHandlingNetworkAPI = (*LinkedInClient)(nil) +) + +func (lc *LinkedInClient) HandleMatrixTyping(_ context.Context, msg *bridgev2.MatrixTyping) error { + if msg.IsTyping && msg.Type == bridgev2.TypingTypeText { + return lc.client.StartTyping(string(msg.Portal.ID)) + } + return nil +} + +func (lc *LinkedInClient) HandleMatrixMessage(ctx context.Context, msg *bridgev2.MatrixMessage) (message *bridgev2.MatrixMessageResponse, err error) { + conversationUrn := string(msg.Portal.ID) + sendMessagePayload := payload.SendMessagePayload{ + Message: 
payload.SendMessageData{ + Body: payload.MessageBody{ + Text: msg.Content.Body, + }, + ConversationUrn: conversationUrn, + RenderContentUnions: []payload.RenderContent{}, + }, + } + + if msg.ReplyTo != nil { + sendMessagePayload.Message.RenderContentUnions = append( + sendMessagePayload.Message.RenderContentUnions, + payload.RenderContent{ + RepliedMessageContent: &payload.RepliedMessageContent{ + OriginalSenderUrn: string(msg.ReplyTo.SenderID), + OriginalMessageUrn: string(msg.ReplyTo.ID), + OriginalSendAt: msg.ReplyTo.Timestamp.UnixMilli(), + //MessageBody: "", // todo add at some point + }, + }, + ) + } + + content := msg.Content + + switch content.MsgType { + case event.MsgText: + break + case event.MsgVideo, event.MsgImage: + if content.Body == content.FileName { + sendMessagePayload.Message.Body.Text = "" + } + + file := content.GetFile() + data, err := lc.connector.br.Bot.DownloadMedia(ctx, file.URL, file) + if err != nil { + return nil, err + } + + attachmentType := payload.MESSAGING_FILE_ATTACHMENT + if content.MsgType == event.MsgImage { + attachmentType = payload.MESSAGING_PHOTO_ATTACHMENT + } + + mediaMetadata, err := lc.client.UploadMedia(attachmentType, content.FileName, data, types.JSON_PLAINTEXT_UTF8) + if err != nil { + return nil, err + } + + lc.client.Logger.Debug().Any("media_metadata", mediaMetadata).Msg("Successfully uploaded media to LinkedIn's servers") + sendMessagePayload.Message.RenderContentUnions = append(sendMessagePayload.Message.RenderContentUnions, payload.RenderContent{ + File: &payload.File{ + AssetUrn: mediaMetadata.Urn, + Name: content.FileName, + MediaType: types.ContentType(content.Info.MimeType), + ByteSize: len(data), + }, + }) + default: + return nil, fmt.Errorf("%w %s", bridgev2.ErrUnsupportedMessageType, content.MsgType) + } + + resp, err := lc.client.SendMessage(sendMessagePayload) + if err != nil { + return nil, err + } + + return &bridgev2.MatrixMessageResponse{ + DB: &database.Message{ + ID: 
networkid.MessageID(resp.Data.EntityUrn), + MXID: msg.Event.ID, + Room: msg.Portal.PortalKey, + SenderID: networkid.UserID(lc.client.GetCurrentUserID()), + Timestamp: time.UnixMilli(resp.Data.DeliveredAt), + }, + }, nil +} + +func (lc *LinkedInClient) PreHandleMatrixReaction(_ context.Context, msg *bridgev2.MatrixReaction) (bridgev2.MatrixReactionPreResponse, error) { + return bridgev2.MatrixReactionPreResponse{ + SenderID: networkid.UserID(lc.userLogin.ID), + Emoji: msg.Content.RelatesTo.Key, + MaxReactions: 1, + }, nil +} + +func (lc *LinkedInClient) HandleMatrixReactionRemove(_ context.Context, msg *bridgev2.MatrixReactionRemove) error { + return lc.doHandleMatrixReaction(false, string(msg.TargetReaction.MessageID), msg.TargetReaction.Emoji) +} + +func (lc *LinkedInClient) HandleMatrixReaction(_ context.Context, msg *bridgev2.MatrixReaction) (reaction *database.Reaction, err error) { + return nil, lc.doHandleMatrixReaction(true, string(msg.TargetMessage.ID), msg.PreHandleResp.Emoji) +} + +func (lc *LinkedInClient) doHandleMatrixReaction(react bool, messageUrn, emoji string) error { + reactionPayload := payload.SendReactionPayload{ + MessageUrn: messageUrn, + } + err := lc.client.SendReaction(reactionPayload, react) + if err != nil { + return err + } + + lc.client.Logger.Debug().Any("payload", reactionPayload).Msg("Reaction response") + return nil +} + +func (lc *LinkedInClient) HandleMatrixReadReceipt(ctx context.Context, msg *bridgev2.MatrixReadReceipt) error { + _, err := lc.client.MarkThreadRead([]string{string(msg.Portal.ID)}, true) + return err +} + +func (lc *LinkedInClient) HandleMatrixEdit(_ context.Context, edit *bridgev2.MatrixEdit) error { + return lc.client.EditMessage(string(edit.EditTarget.ID), payload.MessageBody{ + Text: edit.Content.Body, + }) +} diff --git a/pkg/connector/login.go b/pkg/connector/login.go new file mode 100644 index 0000000..14b672c --- /dev/null +++ b/pkg/connector/login.go @@ -0,0 +1,123 @@ +package connector + +import ( + 
"context"
	"fmt"
	"regexp"

	"maunium.net/go/mautrix/bridgev2"
	"maunium.net/go/mautrix/bridgev2/database"
	"maunium.net/go/mautrix/bridgev2/networkid"

	"github.com/beeper/linkedin/pkg/linkedingo"
	linCookies "github.com/beeper/linkedin/pkg/linkedingo/cookies"
)

// LinkedInLogin drives the cookie-based login flow for one user.
type LinkedInLogin struct {
	User    *bridgev2.User
	Cookies string
	lc      *LinkedInConnector
}

var (
	// LoginUrlRegex matches linkedin.com request URLs.
	// Fixed: the previous pattern doubled every backslash inside a raw string
	// literal (`\\/`, `\\.`), which made the regex require literal backslash
	// characters and therefore never match a real URL.
	LoginUrlRegex = regexp.MustCompile(`https?://(www\.)?([\w-]+\.)*linkedin\.com(/[^\s]*)?`)
	// ValidCookieRegex requires a JSESSIONID cookie to be present.
	ValidCookieRegex = regexp.MustCompile(`\bJSESSIONID=[^;]+`)
)

var (
	LoginStepIDCookies  = "fi.mau.linkedin.login.enter_cookies"
	LoginStepIDComplete = "fi.mau.linkedin.login.complete"
)

var _ bridgev2.LoginProcessCookies = (*LinkedInLogin)(nil)

// GetLoginFlows lists the login flows supported by this connector.
func (lc *LinkedInConnector) GetLoginFlows() []bridgev2.LoginFlow {
	return []bridgev2.LoginFlow{
		{
			Name:        "Cookies",
			Description: "Log in with your LinkedIn account using your cookies",
			ID:          "cookies",
		},
	}
}

// CreateLogin starts a login process for the given flow ID.
func (lc *LinkedInConnector) CreateLogin(_ context.Context, user *bridgev2.User, flowID string) (bridgev2.LoginProcess, error) {
	if flowID != "cookies" {
		return nil, fmt.Errorf("unknown login flow ID: %s", flowID)
	}
	return &LinkedInLogin{User: user, lc: lc}, nil
}

// Start returns the first login step, asking the user for their cookie header.
func (l *LinkedInLogin) Start(_ context.Context) (*bridgev2.LoginStep, error) {
	return &bridgev2.LoginStep{
		Type:         bridgev2.LoginStepTypeCookies,
		StepID:       LoginStepIDCookies,
		Instructions: "Open the Login URL in an Incognito/Private browsing mode. Then, extract the Cookie header as a string/cURL command copied from the Network tab of your browser's DevTools. After that, close the browser **before** pasting the header.",
		CookiesParams: &bridgev2.LoginCookiesParams{
			URL:       "https://linkedin.com/login",
			UserAgent: "",
			Fields: []bridgev2.LoginCookieField{
				{
					ID:       "cookie",
					Required: true,
					Sources: []bridgev2.LoginCookieFieldSource{
						{Type: bridgev2.LoginCookieTypeRequestHeader, Name: "Cookie", RequestURLRegex: LoginUrlRegex.String()},
					},
					Pattern: ValidCookieRegex.String(),
				},
			},
		},
	}, nil
}

func (l *LinkedInLogin) Cancel() {}

// SubmitCookies validates the submitted cookies by loading the messaging page,
// fetches the user's profile, and creates the user login record.
func (l *LinkedInLogin) SubmitCookies(ctx context.Context, cookies map[string]string) (*bridgev2.LoginStep, error) {
	cookieStruct := linCookies.NewCookiesFromString(cookies["cookie"])

	meta := &UserLoginMetadata{
		Cookies: cookieStruct.String(),
	}

	clientOpts := &linkedingo.ClientOpts{
		Cookies: cookieStruct,
	}
	client := linkedingo.NewClient(clientOpts, l.User.Log)

	err := client.LoadMessagesPage()
	if err != nil {
		// Fixed: previously the underlying error was dropped from the message.
		return nil, fmt.Errorf("failed to load messages page after submitting cookies: %w", err)
	}

	profile, err := client.GetCurrentUserProfile()
	if err != nil {
		// Fixed: previously this error was silently ignored, which could lead
		// to a nil/zero profile being dereferenced below.
		return nil, fmt.Errorf("failed to fetch current user profile: %w", err)
	}

	id := networkid.UserLoginID(client.GetCurrentUserID())
	ul, err := l.User.NewLogin(
		ctx,
		&database.UserLogin{
			ID:         id,
			Metadata:   meta,
			RemoteName: fmt.Sprintf("%s %s", profile.MiniProfile.FirstName, profile.MiniProfile.LastName),
		},
		&bridgev2.NewLoginParams{
			DeleteOnConflict:  true,
			DontReuseExisting: false,
			LoadUserLogin:     l.lc.LoadUserLogin,
		},
	)
	if err != nil {
		return nil, err
	}

	ul.Client.Connect(ctx)

	return &bridgev2.LoginStep{
		Type:         bridgev2.LoginStepTypeComplete,
		StepID:       LoginStepIDComplete,
		Instructions: fmt.Sprintf("Successfully logged into @%s", ul.UserLogin.RemoteName),
		CompleteParams: &bridgev2.LoginCompleteParams{
			UserLoginID: ul.ID,
			UserLogin:   ul,
		},
	}, nil
}

diff --git a/pkg/connector/mapping.go b/pkg/connector/mapping.go
new file mode 100644
index 0000000..28de550
--- /dev/null
+++ b/pkg/connector/mapping.go
@@ -0,0 +1,292 @@
package
connector

import (
	"context"
	"fmt"
	"io"
	"net/http"
	"path"
	"time"

	"go.mau.fi/util/ptr"
	"maunium.net/go/mautrix/bridgev2"
	"maunium.net/go/mautrix/bridgev2/database"
	"maunium.net/go/mautrix/bridgev2/networkid"
	bridgeEvt "maunium.net/go/mautrix/event"

	"github.com/beeper/linkedin/pkg/linkedingo/routing/payload"
	"github.com/beeper/linkedin/pkg/linkedingo/routing/query"
	"github.com/beeper/linkedin/pkg/linkedingo/routing/response"
	"github.com/beeper/linkedin/pkg/linkedingo/types"
)

// MakeAvatar wraps an avatar URL into a lazily-downloaded bridgev2.Avatar.
// An empty URL marks the avatar for removal.
func MakeAvatar(avatarURL string) *bridgev2.Avatar {
	return &bridgev2.Avatar{
		ID: networkid.AvatarID(avatarURL),
		Get: func(ctx context.Context) ([]byte, error) {
			req, err := http.NewRequestWithContext(ctx, http.MethodGet, avatarURL, nil)
			if err != nil {
				return nil, fmt.Errorf("failed to prepare request: %w", err)
			}

			getResp, err := http.DefaultClient.Do(req)
			if err != nil {
				return nil, fmt.Errorf("failed to download avatar: %w", err)
			}

			data, err := io.ReadAll(getResp.Body)
			_ = getResp.Body.Close()
			if err != nil {
				return nil, fmt.Errorf("failed to read avatar data: %w", err)
			}
			return data, err
		},
		Remove: avatarURL == "",
	}
}

// ConversationToChatInfo maps a LinkedIn thread to bridgev2 chat info.
func (lc *LinkedInClient) ConversationToChatInfo(thread *response.ThreadElement) *bridgev2.ChatInfo {
	memberList := lc.ParticipantsToMemberList(thread.ConversationParticipants)
	return &bridgev2.ChatInfo{
		Name:        &thread.Title,
		Members:     memberList,
		Type:        lc.ConversationTypeToRoomType(thread.GroupChat),
		CanBackfill: true,
	}
}

// ConversationTypeToRoomType maps LinkedIn's group-chat flag to a room type.
func (lc *LinkedInClient) ConversationTypeToRoomType(isGroupChat bool) *database.RoomType {
	var roomType database.RoomType
	if isGroupChat {
		roomType = database.RoomTypeGroupDM
	} else {
		roomType = database.RoomTypeDM
	}
	return &roomType
}

// ParticipantsToMemberList converts thread participants into a full member map.
func (lc *LinkedInClient) ParticipantsToMemberList(participants []types.ConversationParticipant) *bridgev2.ChatMemberList {
	selfUserId := lc.client.GetCurrentUserID()
	memberMap := map[networkid.UserID]bridgev2.ChatMember{}
	for _, participant := range participants {
		memberMap[networkid.UserID(participant.HostIdentityUrn)] = lc.ParticipantToChatMember(participant, participant.HostIdentityUrn == selfUserId)
	}

	return &bridgev2.ChatMemberList{
		IsFull:           true,
		TotalMemberCount: len(participants),
		MemberMap:        memberMap,
	}
}

// ParticipantToChatMember converts one participant to a chat member.
// Organization and custom participants are returned as an empty ChatMember.
func (lc *LinkedInClient) ParticipantToChatMember(participant types.ConversationParticipant, isFromMe bool) bridgev2.ChatMember {
	member := participant.ParticipantType.Member
	if participant.ParticipantType.Organization != nil || participant.ParticipantType.Custom != nil {
		return bridgev2.ChatMember{}
	}
	return bridgev2.ChatMember{
		EventSender: bridgev2.EventSender{
			IsFromMe: isFromMe,
			Sender:   networkid.UserID(participant.HostIdentityUrn),
		},
		UserInfo: lc.getUserInfoMember(member),
	}
}

// GetUserInfoBridge returns user info from the in-memory user cache, or nil
// when the user has not been cached yet.
func (lc *LinkedInClient) GetUserInfoBridge(userUrn string) *bridgev2.UserInfo {
	var userinfo *bridgev2.UserInfo
	if member, ok := lc.userCache[userUrn]; ok { // implement user cache
		userinfo = lc.getUserInfoMember(member)
	}
	return userinfo
}

// getUserInfoMember builds bridgev2 user info from a LinkedIn member record.
func (lc *LinkedInClient) getUserInfoMember(member types.Member) *bridgev2.UserInfo {
	return &bridgev2.UserInfo{
		Name:        ptr.Ptr(lc.connector.Config.FormatDisplayname(member.FirstName.Text, member.LastName.Text)),
		Avatar:      MakeAvatar(member.ProfilePicture.RootURL),
		Identifiers: []string{fmt.Sprintf("linkedin:%s", path.Base(member.ProfileURL))},
	}
}

// MessagesToBackfillMessages converts a batch of messages for backfill,
// failing fast on the first conversion error.
func (lc *LinkedInClient) MessagesToBackfillMessages(ctx context.Context, messages []response.MessageElement, portal *bridgev2.Portal) ([]*bridgev2.BackfillMessage, error) {
	backfilledMessages := make([]*bridgev2.BackfillMessage, 0)
	for _, msg := range messages {
		backfilledMessage, err := lc.MessageToBackfillMessage(ctx, msg, portal)
		if err != nil {
			return nil, err
		}
		backfilledMessages = append(backfilledMessages, backfilledMessage)
	}

	return backfilledMessages, nil
}

// MessageToBackfillMessage converts one LinkedIn message (including its
// reactions) into a bridgev2 backfill message.
func (lc *LinkedInClient) MessageToBackfillMessage(ctx context.Context, message response.MessageElement, portal *bridgev2.Portal) (*bridgev2.BackfillMessage, error) {
	messageReactions, err := lc.MessageReactionsToBackfillReactions(message.ReactionSummaries, message.EntityUrn)
	if err != nil {
		return nil, err
	}

	sentAt := time.UnixMilli(message.DeliveredAt)

	// Fixed: removed a dead `if err != nil` check here — BotIntent() does not
	// return an error, so the check re-tested the already-handled err above.
	intent := lc.userLogin.Bridge.Matrix.BotIntent()

	cm, err := lc.convertToMatrix(ctx, portal, intent, &message)
	if err != nil {
		return nil, err
	}

	return &bridgev2.BackfillMessage{
		ConvertedMessage: cm,
		Sender: bridgev2.EventSender{
			IsFromMe: message.Sender.EntityUrn == lc.client.GetCurrentUserID(),
			Sender:   networkid.UserID(message.Sender.EntityUrn),
		},
		ID:        networkid.MessageID(message.EntityUrn),
		Timestamp: sentAt,
		Reactions: messageReactions,
	}, nil
}

// MessageReactionsToBackfillReactions expands reaction summaries into one
// backfill reaction per reacting participant (fetched per emoji).
func (lc *LinkedInClient) MessageReactionsToBackfillReactions(reactions []response.ReactionSummary, messageUrn string) ([]*bridgev2.BackfillReaction, error) {
	backfillReactions := make([]*bridgev2.BackfillReaction, 0)
	for _, reaction := range reactions {
		participants, err := lc.client.GetReactionsForEmoji(query.GetReactionsForEmojiVariables{
			Emoji:      reaction.Emoji,
			MessageUrn: messageUrn,
		})
		if err != nil {
			return nil, err
		}

		for _, participant := range participants {
			backfillReaction := &bridgev2.BackfillReaction{
				// NOTE(review): the summary only carries FirstReactedAt, so all
				// reactors of one emoji share this timestamp.
				Timestamp: time.UnixMilli(reaction.FirstReactedAt),
				Sender: bridgev2.EventSender{
					IsFromMe: participant.HostIdentityUrn == lc.client.GetCurrentUserID(),
					Sender:   networkid.UserID(participant.HostIdentityUrn),
				},
				EmojiID: "",
				Emoji:   reaction.Emoji,
			}
			backfillReactions = append(backfillReactions, backfillReaction)
		}
	}
	return backfillReactions, nil
}

// LinkedInAttachmentToMatrix re-uploads a LinkedIn attachment to Matrix and
// returns the converted message part.
func (tc *LinkedInClient) LinkedInAttachmentToMatrix(ctx context.Context, portal *bridgev2.Portal, intent bridgev2.MatrixAPI, content payload.RenderContent)
(*bridgev2.ConvertedMessagePart, error) { + var attachmentURL string + var mimeType string + var msgType bridgeEvt.MessageType + var attachmentSize int + var duration int + var height int + var width int + if image := content.VectorImage; image != nil { + // image attachment + msgType = bridgeEvt.MsgImage + attachmentURL = image.RootURL + } else if video := content.Video; video != nil { + // video attachment + attachmentURL = video.ProgressiveStreams[0].StreamingLocations[0].Url + mimeType = video.ProgressiveStreams[0].MediaType + msgType = bridgeEvt.MsgVideo + attachmentSize = video.ProgressiveStreams[0].Size + height = video.ProgressiveStreams[0].Height + width = video.ProgressiveStreams[0].Width + } else if audio := content.Audio; audio != nil { + // video attachment + attachmentURL = audio.URL + msgType = bridgeEvt.MsgAudio + duration = audio.Duration + } else if file := content.File; file != nil { + // video attachment + attachmentURL = file.URL + mimeType = string(file.MediaType) + msgType = bridgeEvt.MsgFile + attachmentSize = file.ByteSize + } + + cookieString := tc.client.GetCookieString() + attachmentSize, err := GetFileSize(ctx, cookieString, attachmentURL) + if err != nil { + return nil, err + } + + uploadContent := bridgeEvt.MessageEventContent{ + Info: &bridgeEvt.FileInfo{ + MimeType: mimeType, + Height: height, + Width: width, + Duration: duration, + Size: attachmentSize, + }, + MsgType: msgType, + Body: "", + } + + uploadContent.URL, uploadContent.File, err = intent.UploadMediaStream(ctx, portal.MXID, int64(attachmentSize), true, func(file io.Writer) (*bridgev2.FileStreamResult, error) { + err = GetPlainFileStream(ctx, cookieString, attachmentURL, "linkedin attachment", file) + if err != nil { + return nil, err + } + + return &bridgev2.FileStreamResult{MimeType: uploadContent.Info.MimeType}, nil + }) + + if err != nil { + return nil, err + } + + return &bridgev2.ConvertedMessagePart{ + ID: networkid.PartID(""), + Type: bridgeEvt.EventMessage, + 
Content: &uploadContent, + }, nil +} + +func GetPlainFileStream(ctx context.Context, cookies, url, thing string, writer io.Writer) error { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return fmt.Errorf("failed to prepare request: %w", err) + } + + if cookies != "" { + req.Header.Add("cookie", cookies) + } + + getResp, err := http.DefaultClient.Do(req) + if err != nil { + return fmt.Errorf("failed to download %s: %w", thing, err) + } + + _, err = io.Copy(writer, getResp.Body) + if err != nil { + return fmt.Errorf("failed to read %s data: %w", thing, err) + } + + return nil +} + +func GetFileSize(ctx context.Context, cookies, url string) (int, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodHead, url, nil) + if err != nil { + return 0, fmt.Errorf("failed to prepare request: %w", err) + } + + if cookies != "" { + req.Header.Add("cookie", cookies) + } + + headResp, err := http.DefaultClient.Do(req) + if err != nil { + return 0, fmt.Errorf("failed to get file size: %w", err) + } + + return int(headResp.ContentLength), nil +} diff --git a/pkg/linkedingo/client.go b/pkg/linkedingo/client.go new file mode 100644 index 0000000..23b1bd3 --- /dev/null +++ b/pkg/linkedingo/client.go @@ -0,0 +1,163 @@ +package linkedingo + +import ( + "encoding/json" + "fmt" + "net" + "net/http" + "net/url" + "time" + + "github.com/rs/zerolog" + "golang.org/x/net/proxy" + + "github.com/beeper/linkedin/pkg/linkedingo/cookies" + "github.com/beeper/linkedin/pkg/linkedingo/routing" + queryData "github.com/beeper/linkedin/pkg/linkedingo/routing/query" + "github.com/beeper/linkedin/pkg/linkedingo/types" +) + +type EventHandler func(evt any) +type ClientOpts struct { + Cookies *cookies.Cookies + EventHandler EventHandler +} +type Client struct { + Logger zerolog.Logger + cookies *cookies.Cookies + pageLoader *PageLoader + rc *RealtimeClient + http *http.Client + httpProxy func(*http.Request) (*url.URL, error) + socksProxy proxy.Dialer + 
eventHandler EventHandler
}

// NewClient builds a Client with sane HTTP timeouts, the given cookies (or an
// empty store), and optional event handler, then wires up the realtime client
// and page loader.
func NewClient(opts *ClientOpts, logger zerolog.Logger) *Client {
	cli := Client{
		http: &http.Client{
			Transport: &http.Transport{
				DialContext:           (&net.Dialer{Timeout: 10 * time.Second}).DialContext,
				TLSHandshakeTimeout:   10 * time.Second,
				ResponseHeaderTimeout: 40 * time.Second,
				ForceAttemptHTTP2:     true,
			},
			Timeout: 60 * time.Second,
		},
		Logger: logger,
	}

	if opts.EventHandler != nil {
		cli.SetEventHandler(opts.EventHandler)
	}

	if opts.Cookies != nil {
		cli.cookies = opts.Cookies
	} else {
		cli.cookies = cookies.NewCookies()
	}

	cli.rc = cli.newRealtimeClient()
	cli.pageLoader = cli.newPageLoader()

	return &cli
}

// Connect starts the realtime event stream.
func (c *Client) Connect() error {
	return c.rc.Connect()
}

// Disconnect stops the realtime event stream.
func (c *Client) Disconnect() error {
	return c.rc.Disconnect()
}

// Logout invalidates the current session server-side.
func (c *Client) Logout() error {
	query := queryData.LogoutQuery{
		CsrfToken: c.cookies.Get(cookies.LinkedInJSESSIONID),
	}
	encodedQuery, err := query.Encode()
	if err != nil {
		return err
	}

	// Renamed local from "url" to avoid shadowing the net/url package import.
	logoutURL := fmt.Sprintf("%s?%s", routing.LOGOUT_URL, string(encodedQuery))

	logoutDefinition := routing.RequestStoreDefinition[routing.LOGOUT_URL]
	headers := c.buildHeaders(logoutDefinition.HeaderOpts)
	_, _, err = c.MakeRequest(logoutURL, http.MethodGet, headers, make([]byte, 0), logoutDefinition.ContentType)
	return err
}

// GetCookieString serializes the cookie jar into a Cookie header value.
func (c *Client) GetCookieString() string {
	return c.cookies.String()
}

// LoadMessagesPage primes the client with state from the messaging page.
func (c *Client) LoadMessagesPage() error {
	return c.pageLoader.LoadMessagesPage()
}

// GetCurrentUserID returns the logged-in user's fsd profile ID.
func (c *Client) GetCurrentUserID() string {
	return c.pageLoader.CurrentUser.FsdProfileID
}

// GetCurrentUserProfile fetches the logged-in user's mini profile from the
// voyager common/me endpoint.
func (c *Client) GetCurrentUserProfile() (*types.UserLoginProfile, error) {
	headers := c.buildHeaders(types.HeaderOpts{
		WithCookies:         true,
		WithCsrfToken:       true,
		WithXLiTrack:        true,
		WithXLiPageInstance: true,
		WithXLiProtocolVer:  true,
		WithXLiLang:         true,
	})

	_, data, err := c.MakeRequest(string(routing.VOYAGER_COMMON_ME_URL), http.MethodGet, headers, make([]byte, 0), types.JSON_LINKEDIN_NORMALIZED)
	if err != nil {
		return nil, err
	}

	var response types.GetCommonMeResponse
	err = json.Unmarshal(data, &response)
	if err != nil {
		return nil, err
	}

	// Fixed: guard against an empty Included list, which previously caused an
	// index-out-of-range panic on unexpected responses.
	if len(response.Included) == 0 {
		return nil, fmt.Errorf("common/me response did not include a profile")
	}

	userProfile := &types.UserLoginProfile{
		PlainId:     response.Data["plainId"],
		MiniProfile: response.Included[0],
	}

	return userProfile, nil
}

// SetProxy configures an HTTP(S) or SOCKS5 proxy for all requests.
func (c *Client) SetProxy(proxyAddr string) error {
	proxyParsed, err := url.Parse(proxyAddr)
	if err != nil {
		return err
	}

	if proxyParsed.Scheme == "http" || proxyParsed.Scheme == "https" {
		c.httpProxy = http.ProxyURL(proxyParsed)
		c.http.Transport.(*http.Transport).Proxy = c.httpProxy
	} else if proxyParsed.Scheme == "socks5" {
		c.socksProxy, err = proxy.FromURL(proxyParsed, &net.Dialer{Timeout: 20 * time.Second})
		if err != nil {
			return err
		}
		c.http.Transport.(*http.Transport).Dial = c.socksProxy.Dial
		contextDialer, ok := c.socksProxy.(proxy.ContextDialer)
		if ok {
			c.http.Transport.(*http.Transport).DialContext = contextDialer.DialContext
		}
	}

	c.Logger.Debug().
		Str("scheme", proxyParsed.Scheme).
		Str("host", proxyParsed.Host).
+ Msg("Using proxy") + return nil +} + +func (c *Client) SetEventHandler(handler EventHandler) { + c.eventHandler = handler +} diff --git a/pkg/linkedingo/client_test.go b/pkg/linkedingo/client_test.go new file mode 100644 index 0000000..5ba91eb --- /dev/null +++ b/pkg/linkedingo/client_test.go @@ -0,0 +1,456 @@ +package linkedingo_test + +import ( + "log" + "os" + "testing" + + "github.com/beeper/linkedin/pkg/linkedingo" + "github.com/beeper/linkedin/pkg/linkedingo/cookies" + "github.com/beeper/linkedin/pkg/linkedingo/debug" + "github.com/beeper/linkedin/pkg/linkedingo/event" + "github.com/beeper/linkedin/pkg/linkedingo/routing/payload" + "github.com/beeper/linkedin/pkg/linkedingo/routing/query" + "github.com/beeper/linkedin/pkg/linkedingo/routing/response" + "github.com/beeper/linkedin/pkg/linkedingo/types" +) + +var cli *linkedingo.Client + +func TestClientMain(t *testing.T) { + cookieStr, err := os.ReadFile("cookies.txt") + if err != nil { + log.Fatal(err) + } + cookieStruct := cookies.NewCookiesFromString(string(cookieStr)) + + clientOpts := linkedingo.ClientOpts{ + Cookies: cookieStruct, + } + cli = linkedingo.NewClient(&clientOpts, debug.NewLogger()) + cli.SetEventHandler(evHandler) + + err = cli.LoadMessagesPage() + if err != nil { + log.Fatalf("error while loading main messaging page: %s", err.Error()) + } + + err = cli.Connect() + if err != nil { + log.Fatal(err) + } + + wait := make(chan struct{}) + <-wait +} + +func evHandler(data any) { + switch evtData := data.(type) { + case event.MessageEvent: + cli.Logger.Info().Str("text", evtData.Message.Body.Text).Msg("Received message event") + case event.SystemMessageEvent: + cli.Logger.Info().Str("text", evtData.Message.Body.Text).Msg("Received a system message event") + case event.MessageEditedEvent: + cli.Logger.Info().Str("text", evtData.Message.Body.Text).Msg("Received message edited event") + case event.MessageDeleteEvent: + cli.Logger.Info().Str("text", evtData.Message.Body.Text).Msg("Received message 
delete event") + case event.MessageSeenEvent: + cli.Logger.Info().Any("receipt", evtData.Receipt).Msg("Received message seen event") + case event.MessageReactionEvent: + cli.Logger.Info().Any("reaction", evtData.Reaction).Msg("Received message reaction event") + case event.TypingIndicatorEvent: + cli.Logger.Info().Any("indicator", evtData.Indicator).Msg("Received typing indicator event") + case event.ThreadDeleteEvent: + cli.Logger.Info().Str("thread_id", evtData.Thread.EntityUrn).Msg("Thread was deleted") + case event.ThreadUpdateEvent: + cli.Logger.Info().Any("thread_id", evtData.Thread.EntityUrn).Msg("Thread was updated") + case event.ConnectionReady: + cli.Logger.Info().Msg("Real-time client is connected and ready") + case event.ConnectionClosed: + cli.Logger.Error().Str("reason", string(evtData.Reason)).Msg("Real-time client closed the connection") + cli.Logger.Info().Msg("Attempting to reconnect real-time client") + err := cli.Connect() + if err != nil { + cli.Logger.Fatal().Err(err).Msg("Real-time client failed to reconnect") + } + default: + cli.Logger.Info().Any("evt_data", evtData).Msg("Received unhandled event struct") + } +} + +func testDeleteConversation() { + firstThread := getTopThread() + + err := cli.DeleteConversation(firstThread.EntityUrn) + if err != nil { + log.Fatal(err) + } + + cli.Logger.Info().Str("conversationUrn", firstThread.EntityUrn).Msg("Successfully deleted conversation") + os.Exit(1) +} + +func testCreateConversation() { + // there is not any other endpoint for creating convos, you just send a message with recipient urns instead of conversation urn + participantIds := []string{ + "user:id:urn:1", + "user:id:urn:2", + } + createConvoPayload := payload.SendMessagePayload{ + Message: payload.SendMessageData{ + Body: payload.MessageBody{ + Text: "new convo created", + }, + }, + DedupeByClientGeneratedToken: false, + HostRecipientUrns: participantIds, + ConversationTitle: "test title", + } + + messageResp, err := 
cli.SendMessage(createConvoPayload) + if err != nil { + log.Fatal(err) + } + + cli.Logger.Info().Any("create_conv_msg_resp", messageResp).Any("participant_ids", participantIds).Msg("Successfully created conversation") + os.Exit(1) +} + +func testReadConversations() { + threads, err := cli.GetThreads(query.GetThreadsVariables{}) + if err != nil { + log.Fatal(err) + } + + pickedThreadUrns := make([]string, 0) + for _, thread := range threads.Threads { + if !thread.Read { + pickedThreadUrns = append(pickedThreadUrns, thread.EntityUrn) + } + } + + if len(pickedThreadUrns) == 0 { + log.Fatal("failed to find an unread thread to read") + } + + resp, err := cli.MarkThreadRead(pickedThreadUrns, true) + if err != nil { + log.Fatal(err) + } + + cli.Logger.Info().Any("response_data", resp).Any("thread_ids", pickedThreadUrns).Msg("Successfully read threads!") + os.Exit(1) +} + +func testReplyToMessage() { + firstThread := getTopThread() + firstMessage := firstThread.MessageElements.Messages[0] + + replyMessageBody := payload.SendMessagePayload{ + Message: payload.SendMessageData{ + Body: payload.MessageBody{ + Text: "testing to reply to message", + }, + RenderContentUnions: []payload.RenderContent{ + { + RepliedMessageContent: &payload.RepliedMessageContent{ + OriginalSenderUrn: firstMessage.Sender.EntityUrn, + OriginalMessageUrn: firstMessage.EntityUrn, + MessageBody: firstMessage.Body, + OriginalSendAt: firstMessage.DeliveredAt, + }, + }, + }, + ConversationUrn: firstThread.EntityUrn, + }, + DedupeByClientGeneratedToken: false, + } + + messageResp, err := cli.SendMessage(replyMessageBody) + if err != nil { + log.Fatal(err) + } + + cli.Logger.Info().Any("message_data", messageResp).Msg("Successfully replied to message") + os.Exit(1) +} + +func testDeleteMessage() { + firstThread := getTopThread() + firstThreadUrn := firstThread.EntityUrn + + messages, err := cli.FetchMessages(query.FetchMessagesVariables{ + ConversationUrn: firstThreadUrn, + }) + if err != nil { + 
log.Fatal(err) + } + + myUserId := cli.GetCurrentUserID() + var pickedMessage *response.MessageElement + for _, msg := range messages.Messages { + if msg.MessageBodyRenderFormat != response.RenderFormatReCalled && myUserId == msg.Sender.HostIdentityUrn { + pickedMessage = &msg + break + } + } + + if pickedMessage == nil { + log.Fatalf("failed to find a valid message to delete in conversation with urn %s", firstThreadUrn) + } + + messageUrn := pickedMessage.EntityUrn + err = cli.DeleteMessage(messageUrn) + if err != nil { + log.Fatal(err) + } + + cli.Logger.Info().Str("text", pickedMessage.Body.Text).Str("conversationUrn", firstThreadUrn).Msg("Successfully deleted message in conversation") + os.Exit(1) +} + +func testLogAllMessages() { + firstThread := getTopThread() + firstThreadUrn := firstThread.EntityUrn + + variables := query.FetchMessagesVariables{ + ConversationUrn: firstThreadUrn, + } + messageResp, err := cli.FetchMessages(variables) + if err != nil { + log.Fatal(err) + } + + lastMessage := messageResp.Messages[len(messageResp.Messages)-1] + variables.DeliveredAt = lastMessage.DeliveredAt + variables.CountBefore = 20 + variables.CountAfter = 0 + + messageResp, err = cli.FetchMessages(variables) + if err != nil { + log.Fatal(err) + } + + prevCursor := messageResp.Metadata.PrevCursor + variables = query.FetchMessagesVariables{ + ConversationUrn: firstThreadUrn, + PrevCursor: prevCursor, + Count: 20, + } + for variables.PrevCursor != "" { + messageResp, err = cli.FetchMessages(variables) + if err != nil { + log.Fatal(err) + } + + for _, msg := range messageResp.Messages { + cli.Logger.Info().Str("text", msg.Body.Text).Msg("Message") + } + + variables.PrevCursor = messageResp.Metadata.PrevCursor + } + + os.Exit(1) +} + +func testLogAllThreads() { + variables := query.GetThreadsVariables{} // empty for first page + threads, err := cli.GetThreads(variables) + if err != nil { + log.Fatal(err) + } + + lastThread := threads.Threads[len(threads.Threads)-1] + 
lastActvityAt := lastThread.LastActivityAt // cursor + + variables.Count = 20 + variables.InboxCategory = query.INBOX_CATEGORY_PRIMARY + variables.LastUpdatedBefore = lastActvityAt + + threads, err = cli.GetThreads(variables) + if err != nil { + log.Fatal(err) + } + + // now threads.Metadata.NextCursor contains the next cursor to use in variables. + log.Println("Next cursor:", threads.Metadata.NextCursor) + os.Exit(1) +} + +// starts typing in the top conversation +func testStartTyping() { + firstThread := getTopThread() + firstThreadUrn := firstThread.EntityUrn + + err := cli.StartTyping(firstThreadUrn) + if err != nil { + log.Fatal(err) + } + + cli.Logger.Info().Str("conversationUrn", firstThreadUrn).Msg("Successfully started typing in top conversation") + os.Exit(1) +} + +func testUploadVideo() { + videoBytes, err := os.ReadFile("test_data/testvideo1.mp4") + if err != nil { + log.Fatal(err) + } + + mediaContentType := types.VIDEO_MP4 + fileName := "testvideo1.mp4" + mediaResult, err := cli.UploadMedia(payload.MESSAGING_FILE_ATTACHMENT, fileName, videoBytes, mediaContentType) + if err != nil { + log.Fatal(err) + } + + renderContentFile := payload.RenderContent{ + File: &payload.File{ + AssetUrn: mediaResult.Urn, + Name: fileName, + MediaType: mediaContentType, + ByteSize: len(videoBytes), + }, + } + + firstThread := getTopThread() + firstThreadUrn := firstThread.EntityUrn + + sendMessagePayload := payload.SendMessagePayload{ + Message: payload.SendMessageData{ + Body: payload.MessageBody{ + Text: "", + }, + RenderContentUnions: []payload.RenderContent{renderContentFile}, + ConversationUrn: firstThreadUrn, + }, + DedupeByClientGeneratedToken: false, + } + + resp, err := cli.SendMessage(sendMessagePayload) + if err != nil { + log.Fatal(err) + } + + cli.Logger.Info(). + Any("renderContentUnions", resp.Data.RenderContentUnions). + Int64("deliveredAt", resp.Data.DeliveredAt). + Str("text", resp.Data.Body.Text). + Str("conversationUrn", firstThreadUrn). 
+ Msg("Successfully sent test video to top conversation") + os.Exit(1) +} + +func testUploadImage() { + imgBytes, err := os.ReadFile("test_data/testimage1.jpg") + if err != nil { + log.Fatal(err) + } + + mediaContentType := types.IMAGE_JPEG + fileName := "testimage1.jpg" + mediaResult, err := cli.UploadMedia(payload.MESSAGING_PHOTO_ATTACHMENT, fileName, imgBytes, mediaContentType) + if err != nil { + log.Fatal(err) + } + + renderContentFile := payload.RenderContent{ + File: &payload.File{ + AssetUrn: mediaResult.Urn, + Name: fileName, + MediaType: mediaContentType, + ByteSize: len(imgBytes), + }, + } + + firstThread := getTopThread() + firstThreadUrn := firstThread.EntityUrn + + sendMessagePayload := payload.SendMessagePayload{ + Message: payload.SendMessageData{ + Body: payload.MessageBody{ + Text: "", + }, + RenderContentUnions: []payload.RenderContent{renderContentFile}, + ConversationUrn: firstThreadUrn, + }, + DedupeByClientGeneratedToken: false, + } + + resp, err := cli.SendMessage(sendMessagePayload) + if err != nil { + log.Fatal(err) + } + + cli.Logger.Info(). + Any("renderContentUnions", resp.Data.RenderContentUnions). + Int64("deliveredAt", resp.Data.DeliveredAt). + Str("text", resp.Data.Body.Text). + Str("conversationUrn", firstThreadUrn). + Msg("Successfully sent test image to top conversation") + os.Exit(1) +} + +func testEditMessage() { + firstThread := getTopThread() + firstThreadUrn := firstThread.EntityUrn + firstMessage := firstThread.MessageElements.Messages[0] + firstMessageUrn := firstMessage.EntityUrn + + newMessageBody := payload.MessageBody{ + Text: "new message content test", + } + err := cli.EditMessage(firstMessageUrn, newMessageBody) + if err != nil { + log.Fatal(err) + } + + cli.Logger.Info(). + Str("thread_id", firstThreadUrn). + Str("message_id", firstMessageUrn). + Str("new_text", newMessageBody.Text). + Str("old_text", firstMessage.Body.Text). 
+ Msg("Successfully edited message") + os.Exit(1) +} + +// sends a message in the top conversation +func testSendMessage() { + threads, err := cli.GetThreads(query.GetThreadsVariables{}) + if err != nil { + log.Fatal(err) + } + + firstThread := threads.Threads[0] + firstThreadUrn := firstThread.EntityUrn + + sendMessagePayload := payload.SendMessagePayload{ + Message: payload.SendMessageData{ + Body: payload.MessageBody{ + Text: "testing sending a message", + }, + ConversationUrn: firstThreadUrn, + }, + DedupeByClientGeneratedToken: false, + } + + resp, err := cli.SendMessage(sendMessagePayload) + if err != nil { + log.Fatal(err) + } + + cli.Logger.Info(). + Int64("deliveredAt", resp.Data.DeliveredAt). + Str("text", resp.Data.Body.Text). + Str("conversationUrn", firstThreadUrn). + Msg("Successfully sent test message to top conversation") + os.Exit(1) +} + +func getTopThread() response.ThreadElement { + threads, err := cli.GetThreads(query.GetThreadsVariables{}) + if err != nil { + log.Fatal(err) + } + return threads.Threads[0] +} diff --git a/pkg/linkedingo/cookies/cookies.go b/pkg/linkedingo/cookies/cookies.go new file mode 100644 index 0000000..bdc6c14 --- /dev/null +++ b/pkg/linkedingo/cookies/cookies.go @@ -0,0 +1,111 @@ +package cookies + +import ( + "fmt" + "net/http" + "strings" + "sync" + "time" +) + +type LinkedInCookieName string + +const ( + LinkedInLang LinkedInCookieName = "lang" + LinkedInBCookie LinkedInCookieName = "bcookie" + LinkedInBscookie LinkedInCookieName = "bscookie" + LinkedInLiAlerts LinkedInCookieName = "li_alerts" + LinkedInLiGc LinkedInCookieName = "li_gc" + LinkedInLiRm LinkedInCookieName = "li_rm" + LinkedInGclAu LinkedInCookieName = "_gcl_au" + LinkedInAMCVSAdobeOrg LinkedInCookieName = "AMCVS_14215E3D5995C57C0A495C55%40AdobeOrg" // ??? 
+ LinkedInAamUuid LinkedInCookieName = "aam_uuid" + LinkedInLiap LinkedInCookieName = "liap" + LinkedInLiAt LinkedInCookieName = "li_at" + LinkedInJSESSIONID LinkedInCookieName = "JSESSIONID" + LinkedInTimezone LinkedInCookieName = "timezone" + LinkedInDfpfpt LinkedInCookieName = "dfpfpt" + LinkedInFptctx2 LinkedInCookieName = "fptctx2" + LinkedInAMCVAdobeOrg LinkedInCookieName = "AMCV_14215E3D5995C57C0A495C55%40AdobeOrg" // ??? + LinkedInLiMc LinkedInCookieName = "li_mc" + LinkedInCfBm LinkedInCookieName = "__cf_bm" + LinkedInLiTheme LinkedInCookieName = "li_theme" + LinkedInLiThemeSet LinkedInCookieName = "li_theme_set" + LinkedInLiSugr LinkedInCookieName = "li_sugr" + LinkedInGuid LinkedInCookieName = "_guid" + LinkedInUserMatchHistory LinkedInCookieName = "UserMatchHistory" + LinkedInAnalyticsSyncHistory LinkedInCookieName = "AnalyticsSyncHistory" + LinkedInLmsAds LinkedInCookieName = "lms_ads" + LinkedInLmsAnalytics LinkedInCookieName = "lms_analytics" + LinkedInLidc LinkedInCookieName = "lidc" +) + +type Cookies struct { + Store map[LinkedInCookieName]string + lock sync.RWMutex +} + +func NewCookies() *Cookies { + return &Cookies{ + Store: make(map[LinkedInCookieName]string), + lock: sync.RWMutex{}, + } +} + +func NewCookiesFromString(cookieStr string) *Cookies { + c := NewCookies() + cookieStrings := strings.Split(cookieStr, ";") + fakeHeader := http.Header{} + for _, cookieStr := range cookieStrings { + trimmedCookieStr := strings.TrimSpace(cookieStr) + if trimmedCookieStr != "" { + fakeHeader.Add("Set-Cookie", trimmedCookieStr) + } + } + fakeResponse := &http.Response{Header: fakeHeader} + + for _, cookie := range fakeResponse.Cookies() { + c.Store[LinkedInCookieName(cookie.Name)] = cookie.Value + } + + return c +} + +func (c *Cookies) String() string { + c.lock.RLock() + defer c.lock.RUnlock() + var out []string + for k, v := range c.Store { + out = append(out, fmt.Sprintf("%s=%s", k, v)) + } + return strings.Join(out, "; ") +} + +func (c *Cookies) 
IsCookieEmpty(key LinkedInCookieName) bool { + return c.Get(key) == "" +} + +func (c *Cookies) Get(key LinkedInCookieName) string { + c.lock.RLock() + defer c.lock.RUnlock() + return c.Store[key] +} + +func (c *Cookies) Set(key LinkedInCookieName, value string) { + c.lock.Lock() + defer c.lock.Unlock() + c.Store[key] = value +} + +func (c *Cookies) UpdateFromResponse(r *http.Response) { + c.lock.Lock() + defer c.lock.Unlock() + for _, cookie := range r.Cookies() { + if cookie.MaxAge == 0 || cookie.Expires.Before(time.Now()) { + delete(c.Store, LinkedInCookieName(cookie.Name)) + } else { + //log.Println(fmt.Sprintf("updated cookie %s to value %s", cookie.Name, cookie.Value)) + c.Store[LinkedInCookieName(cookie.Name)] = cookie.Value + } + } +} diff --git a/pkg/linkedingo/debug/logger.go b/pkg/linkedingo/debug/logger.go new file mode 100644 index 0000000..e981864 --- /dev/null +++ b/pkg/linkedingo/debug/logger.go @@ -0,0 +1,56 @@ +package debug + +import ( + "encoding/hex" + "fmt" + "strings" + "time" + + "github.com/mattn/go-colorable" + zerolog "github.com/rs/zerolog" +) + +var colors = map[string]string{ + "text": "\x1b[38;5;6m%s\x1b[0m", + "debug": "\x1b[32mDEBUG\x1b[0m", + "gray": "\x1b[38;5;8m%s\x1b[0m", + "info": "\x1b[38;5;111mINFO\x1b[0m", + "error": "\x1b[38;5;204mERROR\x1b[0m", + "fatal": "\x1b[38;5;52mFATAL\x1b[0m", +} + +var output = zerolog.ConsoleWriter{ + Out: colorable.NewColorableStdout(), + TimeFormat: time.ANSIC, + FormatLevel: func(i any) string { + name := fmt.Sprintf("%s", i) + coloredName := colors[name] + return coloredName + }, + FormatMessage: func(i any) string { + coloredMsg := fmt.Sprintf(colors["text"], i) + return coloredMsg + }, + FormatFieldName: func(i any) string { + name := fmt.Sprintf("%s", i) + return fmt.Sprintf(colors["gray"], name+"=") + }, + FormatFieldValue: func(i any) string { + return fmt.Sprintf("%s", i) + }, + NoColor: false, +} + +func NewLogger() zerolog.Logger { + return 
zerolog.New(output).With().Timestamp().Logger()
+}
+
+// BeautifyHex renders data as space-separated two-character hex pairs,
+// e.g. []byte{0x01, 0xab} -> "01 ab". strings.Builder avoids the quadratic
+// allocation cost of repeated string concatenation in the original.
+func BeautifyHex(data []byte) string {
+	hexStr := hex.EncodeToString(data)
+	var sb strings.Builder
+	for i := 0; i < len(hexStr); i += 2 {
+		sb.WriteString(hexStr[i : i+2])
+		sb.WriteByte(' ')
+	}
+
+	return strings.TrimRight(sb.String(), " ")
+}
diff --git a/pkg/linkedingo/errors.go b/pkg/linkedingo/errors.go
new file mode 100644
index 0000000..cc49430
--- /dev/null
+++ b/pkg/linkedingo/errors.go
@@ -0,0 +1,7 @@
+package linkedingo
+
+import "fmt"
+
+func newErrorResponseTypeAssertFailed(t string) error {
+	return fmt.Errorf("failed to type assert response from routing request into %s", t)
+}
diff --git a/pkg/linkedingo/event/event.go b/pkg/linkedingo/event/event.go
new file mode 100644
index 0000000..f4b0800
--- /dev/null
+++ b/pkg/linkedingo/event/event.go
@@ -0,0 +1,58 @@
+package event
+
+import (
+	"time"
+
+	"github.com/beeper/linkedin/pkg/linkedingo/routing/response"
+	"github.com/beeper/linkedin/pkg/linkedingo/types"
+)
+
+type MessageEvent struct {
+	Message response.MessageElement
+}
+
+type SystemMessageEvent struct {
+	Message response.MessageElement
+}
+
+type MessageEditedEvent struct {
+	Message response.MessageElement
+}
+
+type MessageDeleteEvent struct {
+	Message response.MessageElement
+}
+
+type MessageSeenEvent struct {
+	Receipt response.MessageSeenReceipt
+}
+
+type MessageReactionEvent struct {
+	Reaction response.MessageReaction
+}
+
+type UserPresenceEvent struct {
+	FsdProfileId string
+	Status       types.PresenceAvailabilityStatus
+	LastActiveAt time.Time
+}
+
+type TypingIndicatorEvent struct {
+	Indicator response.TypingIndicator
+}
+
+// this event is responsible for most thread updates like:
+// Title changes, archived, unarchived etc
+type ThreadUpdateEvent struct {
+	Thread response.ThreadElement
+}
+
+type ThreadDeleteEvent struct {
+	Thread response.Conversation
+}
+
+type ConnectionReady struct{}
+
+type ConnectionClosed struct {
+	Reason types.ConnectionClosedReason
+}
diff --git 
a/pkg/linkedingo/event/raw/decorated.go b/pkg/linkedingo/event/raw/decorated.go new file mode 100644 index 0000000..b00625c --- /dev/null +++ b/pkg/linkedingo/event/raw/decorated.go @@ -0,0 +1,71 @@ +package raw + +import ( + "github.com/beeper/linkedin/pkg/linkedingo/routing/response" + "github.com/beeper/linkedin/pkg/linkedingo/types" +) + +type DecoratedEventResponse struct { + Topic string `json:"topic,omitempty"` + PublisherTrackingID string `json:"publisherTrackingId,omitempty"` + LeftServerAt int64 `json:"leftServerAt,omitempty"` + ID string `json:"id,omitempty"` + Payload DecoratedEventPayload `json:"payload,omitempty"` + TrackingID string `json:"trackingId,omitempty"` +} + +type DecoratedEventPayload struct { + Data DecoratedEventData `json:"data,omitempty"` + Metadata Metadata `json:"$metadata,omitempty"` + LastActiveAt int64 `json:"lastActiveAt,omitempty"` + Availability types.PresenceAvailabilityStatus `json:"availability,omitempty"` +} + +type DecoratedMessageRealtime struct { + Result response.MessageElement `json:"result,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + Type string `json:"_type,omitempty"` +} + +type DecoratedSeenReceipt struct { + Result response.MessageSeenReceipt `json:"result,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + Type string `json:"_type,omitempty"` +} + +type DecoratedTypingIndiciator struct { + Result response.TypingIndicator `json:"result,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + Type string `json:"_type,omitempty"` +} + +type DecoratedMessageReaction struct { + Result response.MessageReaction `json:"result,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + Type string `json:"_type,omitempty"` +} + +type DecoratedDeletedConversation struct { + Result response.Conversation `json:"result,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + Type string `json:"_type,omitempty"` +} + +type DecoratedUpdatedConversation struct { + Result 
response.ThreadElement `json:"result,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + Type string `json:"_type,omitempty"` +} + +type DecoratedEventData struct { + RecipeType string `json:"_recipeType,omitempty"` + Type string `json:"_type,omitempty"` + DecoratedMessage *DecoratedMessageRealtime `json:"doDecorateMessageMessengerRealtimeDecoration,omitempty"` + DecoratedSeenReceipt *DecoratedSeenReceipt `json:"doDecorateSeenReceiptMessengerRealtimeDecoration,omitempty"` + DecoratedTypingIndicator *DecoratedTypingIndiciator `json:"doDecorateTypingIndicatorMessengerRealtimeDecoration,omitempty"` + DecoratedMessageReaction *DecoratedMessageReaction `json:"doDecorateRealtimeReactionSummaryMessengerRealtimeDecoration,omitempty"` + DecoratedDeletedConversation *DecoratedDeletedConversation `json:"doDecorateConversationDeleteMessengerRealtimeDecoration,omitempty"` + DecoratedUpdatedConversation *DecoratedUpdatedConversation `json:"doDecorateConversationMessengerRealtimeDecoration,omitempty"` +} + +type Metadata struct{} diff --git a/pkg/linkedingo/event/raw/messaging.go b/pkg/linkedingo/event/raw/messaging.go new file mode 100644 index 0000000..4649855 --- /dev/null +++ b/pkg/linkedingo/event/raw/messaging.go @@ -0,0 +1,57 @@ +package raw + +import "github.com/beeper/linkedin/pkg/linkedingo/event" + +func (p *DecoratedEventData) ToMessageEvent() event.MessageEvent { + return event.MessageEvent{ + Message: p.DecoratedMessage.Result, + } +} + +func (p *DecoratedEventData) ToSystemMessageEvent() event.SystemMessageEvent { + return event.SystemMessageEvent{ + Message: p.DecoratedMessage.Result, + } +} + +func (p *DecoratedEventData) ToMessageEditedEvent() event.MessageEditedEvent { + return event.MessageEditedEvent{ + Message: p.DecoratedMessage.Result, + } +} + +func (p *DecoratedEventData) ToMessageDeleteEvent() event.MessageDeleteEvent { + return event.MessageDeleteEvent{ + Message: p.DecoratedMessage.Result, + } +} + +func (p *DecoratedEventData) 
ToMessageSeenEvent() event.MessageSeenEvent { + return event.MessageSeenEvent{ + Receipt: p.DecoratedSeenReceipt.Result, + } +} + +func (p *DecoratedEventData) ToMessageReactionEvent() event.MessageReactionEvent { + return event.MessageReactionEvent{ + Reaction: p.DecoratedMessageReaction.Result, + } +} + +func (p *DecoratedEventData) ToTypingIndicatorEvent() event.TypingIndicatorEvent { + return event.TypingIndicatorEvent{ + Indicator: p.DecoratedTypingIndicator.Result, + } +} + +func (p *DecoratedEventData) ToThreadUpdateEvent() event.ThreadUpdateEvent { + return event.ThreadUpdateEvent{ + Thread: p.DecoratedUpdatedConversation.Result, + } +} + +func (p *DecoratedEventData) ToThreadDeleteEvent() event.ThreadDeleteEvent { + return event.ThreadDeleteEvent{ + Thread: p.DecoratedDeletedConversation.Result, + } +} diff --git a/pkg/linkedingo/event/raw/presence.go b/pkg/linkedingo/event/raw/presence.go new file mode 100644 index 0000000..8455594 --- /dev/null +++ b/pkg/linkedingo/event/raw/presence.go @@ -0,0 +1,15 @@ +package raw + +import ( + "github.com/beeper/linkedin/pkg/linkedingo/event" + + "time" +) + +func (p *DecoratedEventPayload) ToPresenceStatusUpdateEvent(fsdProfileId string) event.UserPresenceEvent { + return event.UserPresenceEvent{ + FsdProfileId: fsdProfileId, + Status: p.Availability, + LastActiveAt: time.UnixMilli(p.LastActiveAt), + } +} diff --git a/pkg/linkedingo/headers.go b/pkg/linkedingo/headers.go new file mode 100644 index 0000000..5648176 --- /dev/null +++ b/pkg/linkedingo/headers.go @@ -0,0 +1,85 @@ +package linkedingo + +import ( + "github.com/beeper/linkedin/pkg/linkedingo/cookies" + "github.com/beeper/linkedin/pkg/linkedingo/types" + + "log" + "net/http" +) + +const BrowserName = "Chrome" +const ChromeVersion = "118" +const ChromeVersionFull = ChromeVersion + ".0.5993.89" +const UserAgent = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/" + ChromeVersion + ".0.0.0 Safari/537.36" +const SecCHUserAgent = 
`"Chromium";v="` + ChromeVersion + `", "Google Chrome";v="` + ChromeVersion + `", "Not-A.Brand";v="99"` +const SecCHFullVersionList = `"Chromium";v="` + ChromeVersionFull + `", "Google Chrome";v="` + ChromeVersionFull + `", "Not-A.Brand";v="99.0.0.0"` +const OSName = "Linux" +const OSVersion = "6.5.0" +const SecCHPlatform = `"` + OSName + `"` +const SecCHPlatformVersion = `"` + OSVersion + `"` +const SecCHMobile = "?0" +const SecCHModel = "" +const SecCHPrefersColorScheme = "light" + +var defaultConstantHeaders = http.Header{ + "accept": []string{"text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7"}, + "accept-language": []string{"en-US,en;q=0.9"}, + "user-agent": []string{UserAgent}, + "sec-ch-ua": []string{SecCHUserAgent}, + "sec-ch-ua-platform": []string{SecCHPlatform}, + "sec-ch-prefers-color-scheme": []string{SecCHPrefersColorScheme}, + "sec-ch-ua-full-version-list": []string{SecCHFullVersionList}, + "sec-ch-ua-mobile": []string{SecCHMobile}, + // "sec-ch-ua-model": []string{SecCHModel}, + // "sec-ch-ua-platform-version": []string{SecCHPlatformVersion}, +} + +func (c *Client) buildHeaders(opts types.HeaderOpts) http.Header { + if opts.Extra == nil { + opts.Extra = make(map[string]string, 0) + } + + headers := defaultConstantHeaders.Clone() + if opts.WithCookies { + opts.Extra["cookie"] = c.cookies.String() + } + + if opts.WithCsrfToken { + opts.Extra["csrf-token"] = c.cookies.Get(cookies.LinkedInJSESSIONID) + } + + if opts.Origin != "" { + opts.Extra["origin"] = opts.Origin + } + + if opts.WithXLiPageInstance { + opts.Extra["x-li-page-instance"] = c.pageLoader.XLiPageInstance + } + + if opts.WithXLiLang { + opts.Extra["x-li-lang"] = c.pageLoader.XLiLang + } + + if opts.WithXLiTrack { + xLiTrack, err := c.pageLoader.XLiDeviceTrack.Encode() + if err != nil { + log.Fatalf("failed to encode x-li-track header to json bytes: %s", err.Error()) + } + opts.Extra["x-li-track"] = 
string(xLiTrack) + } + + if opts.WithXLiProtocolVer { + opts.Extra["x-restli-protocol-version"] = "2.0.0" + } + + if opts.Referer != "" { + opts.Extra["referer"] = opts.Referer + } + + for k, v := range opts.Extra { + headers.Set(k, v) + } + + return headers +} diff --git a/pkg/linkedingo/http.go b/pkg/linkedingo/http.go new file mode 100644 index 0000000..ec4e149 --- /dev/null +++ b/pkg/linkedingo/http.go @@ -0,0 +1,155 @@ +package linkedingo + +import ( + "bytes" + "errors" + "fmt" + "io" + "net/http" + "time" + + "github.com/beeper/linkedin/pkg/linkedingo/routing" + "github.com/beeper/linkedin/pkg/linkedingo/types" +) + +const MaxHTTPRetries = 5 + +var ( + ErrRedirectAttempted = errors.New("redirect attempted") + ErrTokenInvalidated = errors.New("access token is no longer valid") + ErrChallengeRequired = errors.New("challenge required") + ErrConsentRequired = errors.New("consent required") + ErrAccountSuspended = errors.New("account suspended") + ErrRequestFailed = errors.New("failed to send request") + ErrResponseReadFailed = errors.New("failed to read response body") + ErrMaxRetriesReached = errors.New("maximum retries reached") +) + +func isPermanentRequestError(err error) bool { + return errors.Is(err, ErrTokenInvalidated) || + errors.Is(err, ErrChallengeRequired) || + errors.Is(err, ErrConsentRequired) || + errors.Is(err, ErrAccountSuspended) +} + +func (c *Client) MakeRoutingRequest(endpointURL routing.RequestEndpointURL, payload routing.PayloadDataInterface, query routing.PayloadDataInterface) (*http.Response, any, error) { + routingDefinition, ok := routing.RequestStoreDefinition[endpointURL] + if !ok { + return nil, nil, fmt.Errorf("failed to find request definition for endpointURL %s", string(endpointURL)) + } + + headers := c.buildHeaders(routingDefinition.HeaderOpts) + url := string(endpointURL) + + if query != nil { + encodedQuery, err := query.Encode() + if err != nil { + return nil, nil, err + } + url = url + "?" 
+ string(encodedQuery) + } + + var payloadBytes []byte + if payload != nil { + encodedPayload, err := payload.Encode() + if err != nil { + return nil, nil, err + } + payloadBytes = encodedPayload + } + + resp, respBody, err := c.MakeRequest(url, routingDefinition.Method, headers, payloadBytes, routingDefinition.ContentType) + if err != nil { + return nil, nil, err + } + + respDefinition := routingDefinition.ResponseDefinition + var respStruct any + if respDefinition != nil && len(respBody) > 0 { + respStruct, err = respDefinition.Decode(respBody) + } + + return resp, respStruct, err +} + +func (c *Client) MakeRequest(url string, method string, headers http.Header, payload []byte, contentType types.ContentType) (*http.Response, []byte, error) { + var attempts int + for { + attempts++ + start := time.Now() + resp, respDat, err := c.makeRequestDirect(url, method, headers, payload, contentType) + dur := time.Since(start) + if err == nil { + c.Logger.Debug(). + Str("url", url). + Str("method", method). + Dur("duration", dur). + Msg("Request successful") + return resp, respDat, nil + } else if attempts > MaxHTTPRetries { + c.Logger.Err(err). + Str("url", url). + Str("method", method). + Dur("duration", dur). + Msg("Request failed, giving up") + return nil, nil, fmt.Errorf("%w: %w", ErrMaxRetriesReached, err) + } else if isPermanentRequestError(err) { + c.Logger.Err(err). + Str("url", url). + Str("method", method). + Dur("duration", dur). + Msg("Request failed, cannot be retried") + return nil, nil, err + } else if errors.Is(err, ErrRedirectAttempted) { + location := resp.Header.Get("Location") + c.Logger.Err(err). + Str("url", url). + Str("location", location). + Str("method", method). + Dur("duration", dur). + Msg("Redirect attempted") + return resp, nil, err + } + c.Logger.Err(err). + Str("url", url). + Str("method", method). + Dur("duration", dur). 
+ Msg("Request failed, retrying") + time.Sleep(time.Duration(attempts) * 3 * time.Second) + } +} + +func (c *Client) makeRequestDirect(url string, method string, headers http.Header, payload []byte, contentType types.ContentType) (*http.Response, []byte, error) { + newRequest, err := http.NewRequest(method, url, bytes.NewBuffer(payload)) + if err != nil { + return nil, nil, err + } + + if contentType != types.NONE { + headers.Set("content-type", string(contentType)) + } + + newRequest.Header = headers + + response, err := c.http.Do(newRequest) + defer func() { + if response != nil && response.Body != nil { + _ = response.Body.Close() + } + }() + if err != nil { + if errors.Is(err, ErrRedirectAttempted) { + return response, nil, err + } + c.Logger.Warn().Str("error", err.Error()).Msg("Http request error") + // c.UpdateProxy(fmt.Sprintf("http request error: %v", err.Error())) + return nil, nil, fmt.Errorf("%w: %w", ErrRequestFailed, err) + } + + responseBody, err := io.ReadAll(response.Body) + if err != nil { + return nil, nil, fmt.Errorf("%w: %w", ErrResponseReadFailed, err) + } + + return response, responseBody, nil +} diff --git a/pkg/linkedingo/media.go b/pkg/linkedingo/media.go new file mode 100644 index 0000000..2dff022 --- /dev/null +++ b/pkg/linkedingo/media.go @@ -0,0 +1,47 @@ +package linkedingo + +import ( + "fmt" + + "github.com/beeper/linkedin/pkg/linkedingo/routing" + "github.com/beeper/linkedin/pkg/linkedingo/routing/payload" + "github.com/beeper/linkedin/pkg/linkedingo/routing/query" + "github.com/beeper/linkedin/pkg/linkedingo/routing/response" + "github.com/beeper/linkedin/pkg/linkedingo/types" +) + +func (c *Client) UploadMedia(mediaUploadType payload.MediaUploadType, fileName string, mediaBytes []byte, contentType types.ContentType) (*response.MediaMetadata, error) { + uploadMetadataQuery := query.DoActionQuery{ + Action: query.ACTION_UPLOAD, + } + uploadMetadataPayload := payload.UploadMediaMetadataPayload{ + MediaUploadType: mediaUploadType, + 
FileSize: len(mediaBytes), + Filename: fileName, + } + + _, respData, err := c.MakeRoutingRequest(routing.VOYAGER_MEDIA_UPLOAD_METADATA_URL, uploadMetadataPayload, uploadMetadataQuery) + if err != nil { + return nil, err + } + + metaDataResp, ok := respData.(*response.UploadMediaMetadataResponse) + if !ok { + return nil, newErrorResponseTypeAssertFailed("*response.UploadMediaMetadataResponse") + } + + metaData := metaDataResp.Data.Value + uploadUrl := metaData.SingleUploadURL + + uploadHeaders := c.buildHeaders(types.HeaderOpts{WithCookies: true, WithCsrfToken: true}) + resp, _, err := c.MakeRequest(uploadUrl, "PUT", uploadHeaders, mediaBytes, contentType) + if err != nil { + return nil, err + } + + if resp.StatusCode > 204 { + return nil, fmt.Errorf("failed to upload media with file name %s (statusCode=%d)", fileName, resp.StatusCode) + } + + return &metaData, err +} diff --git a/pkg/linkedingo/messaging.go b/pkg/linkedingo/messaging.go new file mode 100644 index 0000000..ef8b947 --- /dev/null +++ b/pkg/linkedingo/messaging.go @@ -0,0 +1,352 @@ +package linkedingo + +import ( + "encoding/json" + "fmt" + "net/url" + + "github.com/beeper/linkedin/pkg/linkedingo/methods" + "github.com/beeper/linkedin/pkg/linkedingo/routing" + "github.com/beeper/linkedin/pkg/linkedingo/routing/response" + + "github.com/beeper/linkedin/pkg/linkedingo/routing/payload" + "github.com/beeper/linkedin/pkg/linkedingo/routing/query" + "github.com/beeper/linkedin/pkg/linkedingo/types" + + "github.com/google/uuid" +) + +// u dont have to pass mailboxUrn if u don't want to +// library will automatically set it for you +func (c *Client) GetThreads(variables query.GetThreadsVariables) (*response.MessengerConversationsResponse, error) { + if variables.MailboxUrn == "" { + variables.MailboxUrn = c.pageLoader.CurrentUser.FsdProfileID + } + + withCursor := variables.LastUpdatedBefore != 0 && variables.NextCursor != "" + var queryId types.GraphQLQueryIDs + if withCursor { + queryId = 
types.GRAPHQL_QUERY_ID_MESSENGER_CONVERSATIONS_WITH_CURSOR + } else { + queryId = types.GRAPHQL_QUERY_ID_MESSENGER_CONVERSATIONS + } + + variablesQuery, err := variables.Encode() + if err != nil { + return nil, err + } + + threadQuery := query.GraphQLQuery{ + QueryID: queryId, + Variables: string(variablesQuery), + } + + _, respData, err := c.MakeRoutingRequest(routing.VOYAGER_MESSAGING_GRAPHQL_URL, nil, &threadQuery) + if err != nil { + return nil, err + } + + graphQLResponse, ok := respData.(*response.GraphQlResponse) + if !ok || graphQLResponse == nil { + return nil, newErrorResponseTypeAssertFailed("*response.GraphQlResponse") + } + + graphQLResponseData := graphQLResponse.Data + if withCursor { + return graphQLResponseData.MessengerConversationsByCategory, nil + } + + return graphQLResponseData.MessengerConversationsBySyncToken, nil +} + +func (c *Client) FetchMessages(variables query.FetchMessagesVariables) (*response.MessengerMessagesResponse, error) { + withCursor := variables.PrevCursor != "" + withAnchorTimestamp := variables.DeliveredAt != 0 + + var queryId types.GraphQLQueryIDs + if withCursor { + queryId = types.GRAPHQL_QUERY_ID_MESSENGER_MESSAGES_BY_CONVERSATION + } else if withAnchorTimestamp { + queryId = types.GRAPHQL_QUERY_ID_MESSENGER_MESSAGES_BY_ANCHOR_TIMESTAMP + } else { + queryId = types.GRAPHQL_QUERY_ID_MESSENGER_MESSAGES_BY_SYNC_TOKEN + } + + variablesQuery, err := variables.Encode() + if err != nil { + return nil, err + } + messagesQuery := query.GraphQLQuery{ + QueryID: queryId, + Variables: string(variablesQuery), + } + + _, respData, err := c.MakeRoutingRequest(routing.VOYAGER_MESSAGING_GRAPHQL_URL, nil, &messagesQuery) + if err != nil { + return nil, err + } + + graphQLResponse, ok := respData.(*response.GraphQlResponse) + if !ok || graphQLResponse == nil { + return nil, newErrorResponseTypeAssertFailed("*response.GraphQlResponse") + } + + graphQLResponseData := graphQLResponse.Data + if withCursor { + return 
graphQLResponseData.MessengerMessagesByConversation, nil + } else if withAnchorTimestamp { + return graphQLResponseData.MessengerMessagesByAnchorTimestamp, nil + } + + return graphQLResponseData.MessengerMessagesBySyncToken, nil +} + +func (c *Client) EditMessage(messageUrn string, p payload.MessageBody) error { + url := fmt.Sprintf("%s/%s", routing.VOYAGER_MESSAGING_DASH_MESSENGER_MESSAGES_URL, url.QueryEscape(messageUrn)) + + headerOpts := types.HeaderOpts{ + WithCookies: true, + WithCsrfToken: true, + Origin: string(routing.BASE_URL), + WithXLiTrack: true, + WithXLiProtocolVer: true, + WithXLiPageInstance: true, + WithXLiLang: true, + Extra: map[string]string{"accept": string(types.JSON)}, + } + headers := c.buildHeaders(headerOpts) + + editMessagePayload := payload.GraphQLPatchBody{ + Patch: payload.Patch{ + Set: payload.Set{ + Body: p, + }, + }, + } + + payloadBytes, err := editMessagePayload.Encode() + if err != nil { + return err + } + + resp, respBody, err := c.MakeRequest(url, "POST", headers, payloadBytes, types.PLAINTEXT_UTF8) + if err != nil { + return err + } + + if resp.StatusCode > 204 { + return fmt.Errorf("failed to edit message with urn %s (statusCode=%d, response_body=%s)", messageUrn, resp.StatusCode, string(respBody)) + } + + return nil +} + +// function will set mailboxUrn, originToken and trackingId automatically IF it is empty +// so you do not have to set it if u dont want to +func (c *Client) SendMessage(p payload.SendMessagePayload) (*response.MessageSentResponse, error) { + actionQuery := query.DoActionQuery{ + Action: query.ACTION_CREATE_MESSAGE, + } + + if p.MailboxUrn == "" { + p.MailboxUrn = c.pageLoader.CurrentUser.FsdProfileID + } + + if p.TrackingID == "" { + p.TrackingID = methods.GenerateTrackingId() + } + + if p.Message.OriginToken == "" { + p.Message.OriginToken = uuid.NewString() + } + + resp, respData, err := c.MakeRoutingRequest(routing.VOYAGER_MESSAGING_DASH_MESSENGER_MESSAGES_URL, p, actionQuery) + if err != nil { + 
return nil, err + } + + if resp.StatusCode > 204 { + return nil, fmt.Errorf("failed to send message to conversation with urn %s (statusCode=%d)", p.Message.ConversationUrn, resp.StatusCode) + } + + messageSentResponse, ok := respData.(*response.MessageSentResponse) + if !ok { + return nil, newErrorResponseTypeAssertFailed("*response.MessageSentResponse") + } + + return messageSentResponse, nil +} + +func (c *Client) StartTyping(conversationUrn string) error { + actionQuery := query.DoActionQuery{ + Action: query.ACTION_TYPING, + } + + payload := payload.StartTypingPayload{ + ConversationUrn: conversationUrn, + } + + resp, _, err := c.MakeRoutingRequest(routing.VOYAGER_MESSAGING_DASH_MESSENGER_CONVERSATIONS_URL, payload, actionQuery) + if err != nil { + return err + } + + if resp.StatusCode > 204 { + return fmt.Errorf("failed to start typing in conversation with urn %s (statusCode=%d)", conversationUrn, resp.StatusCode) + } + + return nil +} + +func (c *Client) DeleteMessage(messageUrn string) error { + actionQuery := query.DoActionQuery{ + Action: query.ACTION_RECALL, + } + + payload := payload.DeleteMessagePayload{ + MessageUrn: messageUrn, + } + + resp, _, err := c.MakeRoutingRequest(routing.VOYAGER_MESSAGING_DASH_MESSENGER_MESSAGES_URL, payload, actionQuery) + if err != nil { + return err + } + + if resp.StatusCode > 204 { + return fmt.Errorf("failed to delete message with message urn %s (statusCode=%d)", messageUrn, resp.StatusCode) + } + + return nil +} + +// this endpoint allows you to mark multiple threads as read/unread at a time +// pass false to second arg to unread all conversations and true to read all of them +func (c *Client) MarkThreadRead(conversationUrns []string, read bool) (*response.MarkThreadReadResponse, error) { + queryUrnValues := "" + entities := make(map[string]payload.GraphQLPatchBody, 0) + for i, convUrn := range conversationUrns { + if i >= len(conversationUrns)-1 { + queryUrnValues += url.QueryEscape(convUrn) + } else { + 
queryUrnValues += url.QueryEscape(convUrn) + "," + } + entities[convUrn] = payload.GraphQLPatchBody{ + Patch: payload.Patch{ + Set: payload.MarkThreadReadBody{ + Read: read, + }, + }, + } + } + + queryStr := fmt.Sprintf("ids=List(%s)", queryUrnValues) + url := fmt.Sprintf("%s?%s", routing.VOYAGER_MESSAGING_DASH_MESSENGER_CONVERSATIONS_URL, queryStr) + payload := payload.PatchEntitiesPayload{ + Entities: entities, + } + + payloadBytes, err := payload.Encode() + if err != nil { + return nil, err + } + + headerOpts := types.HeaderOpts{ + WithCookies: true, + WithCsrfToken: true, + Origin: string(routing.BASE_URL), + WithXLiTrack: true, + WithXLiProtocolVer: true, + WithXLiPageInstance: true, + WithXLiLang: true, + Extra: map[string]string{"accept": string(types.JSON)}, + } + + headers := c.buildHeaders(headerOpts) + resp, respBody, err := c.MakeRequest(url, "POST", headers, payloadBytes, types.PLAINTEXT_UTF8) + if err != nil { + return nil, err + } + + if resp.StatusCode > 204 { + return nil, fmt.Errorf("failed to read conversations... 
(statusCode=%d)", resp.StatusCode) + } + + result := &response.MarkThreadReadResponse{} + return result, json.Unmarshal(respBody, result) +} + +func (c *Client) DeleteConversation(conversationUrn string) error { + url := fmt.Sprintf("%s/%s", routing.VOYAGER_MESSAGING_DASH_MESSENGER_CONVERSATIONS_URL, url.QueryEscape(conversationUrn)) + + headers := c.buildHeaders(types.HeaderOpts{ + WithCookies: true, + WithCsrfToken: true, + WithXLiTrack: true, + WithXLiPageInstance: true, + WithXLiLang: true, + WithXLiProtocolVer: true, + Origin: string(routing.BASE_URL), + Extra: map[string]string{ + "accept": string(types.GRAPHQL), + }, + }) + + resp, _, err := c.MakeRequest(url, "DELETE", headers, nil, types.NONE) + if err != nil { + return err + } + + if resp.StatusCode > 204 { + return fmt.Errorf("failed to delete conversation with conversation urn %s (statusCode=%d)", conversationUrn, resp.StatusCode) + } + + return nil +} + +// pass true to second arg to react and pass false to unreact +func (c *Client) SendReaction(p payload.SendReactionPayload, react bool) error { + action := query.ACTION_REACT_WITH_EMOJI + if !react { + action = query.ACTION_UNREACT_WITH_EMOJI + } + actionQuery := query.DoActionQuery{ + Action: action, + } + + resp, _, err := c.MakeRoutingRequest(routing.VOYAGER_MESSAGING_DASH_MESSENGER_MESSAGES_URL, p, actionQuery) + if err != nil { + return err + } + + if resp.StatusCode > 204 { + return fmt.Errorf("failed to perform reaction with emoji %s on message with urn %s (statusCode=%d)", p.Emoji, p.MessageUrn, resp.StatusCode) + } + + return nil +} + +func (c *Client) GetReactionsForEmoji(vars query.GetReactionsForEmojiVariables) ([]types.ConversationParticipant, error) { + variablesQuery, err := vars.Encode() + if err != nil { + return nil, err + } + + query := query.GraphQLQuery{ + QueryID: "messengerMessagingParticipants.3d2e0e93494e9dbf4943dc19da98bdf6", + Variables: string(variablesQuery), + } + + _, respData, err := 
c.MakeRoutingRequest(routing.VOYAGER_MESSAGING_GRAPHQL_URL, nil, &query)
+	if err != nil {
+		return nil, err
+	}
+
+	graphQLResponse, ok := respData.(*response.GraphQlResponse)
+	if !ok || graphQLResponse == nil {
+		return nil, newErrorResponseTypeAssertFailed("*response.GraphQlResponse")
+	}
+
+	graphQLResponseData := graphQLResponse.Data
+
+	return graphQLResponseData.MessengerMessagingParticipantsByMessageAndEmoji.Participants, nil
+}
diff --git a/pkg/linkedingo/methods/html.go b/pkg/linkedingo/methods/html.go
new file mode 100644
index 0000000..d1a713c
--- /dev/null
+++ b/pkg/linkedingo/methods/html.go
@@ -0,0 +1,30 @@
+package methods
+
+import (
+	"fmt"
+	"regexp"
+)
+
+var (
+	// NOTE(review): the original pattern here was empty (it appears the
+	// HTML-looking regex was stripped during extraction), which made
+	// fmt.Sprintf(MetaTagRegex, name) produce garbage and ParseMetaTagValue
+	// always return "". Reconstructed to capture the content attribute of a
+	// named <meta> tag — verify against the live page markup.
+	MetaTagRegex    = `<meta name="%s" content="(.*?)">`
+	FsdProfileRegex = regexp.MustCompile(`urn:li:fsd_profile:[A-Za-z0-9]*-sub0`)
+)
+
+// ParseMetaTagValue returns the content attribute of the <meta> tag with the
+// given name, or "" when no such tag is found.
+func ParseMetaTagValue(html string, name string) string {
+	metaRegexp := regexp.MustCompile(fmt.Sprintf(MetaTagRegex, name))
+	matches := metaRegexp.FindStringSubmatch(html)
+	if len(matches) < 2 {
+		return ""
+	}
+
+	return matches[1]
+}
+
+// ParseFsdProfileID extracts the current user's fsd_profile URN from page
+// HTML, or returns "" when absent.
+func ParseFsdProfileID(html string) string {
+	matches := FsdProfileRegex.FindStringSubmatch(html)
+	if len(matches) < 1 {
+		return ""
+	}
+
+	return matches[0]
+}
diff --git a/pkg/linkedingo/methods/methods.go b/pkg/linkedingo/methods/methods.go
new file mode 100644
index 0000000..6c721fb
--- /dev/null
+++ b/pkg/linkedingo/methods/methods.go
@@ -0,0 +1,64 @@
+package methods
+
+import (
+	"fmt"
+	"net/url"
+	"reflect"
+	"strings"
+
+	"go.mau.fi/util/random"
+)
+
+// this works btw, just doesn't include invalid bytes
+// return string(random.StringBytes(16))
+func GenerateTrackingId() string {
+	randByteArray := random.Bytes(16)
+	charArray := make([]rune, len(randByteArray))
+	for i, b := range randByteArray {
+		charArray[i] = rune(b)
+	}
+	return string(charArray)
+}
+
+// EncodeGraphQLQuery serializes the non-zero fields of definition into
+// LinkedIn's "(key:value,key:value)" GraphQL variable syntax, using each
+// field's `graphql` struct tag as the key and URL-escaping the value.
+func EncodeGraphQLQuery(definition any) ([]byte, error) {
+	var sb strings.Builder
+	sb.WriteString("(")
+
+	v := reflect.ValueOf(definition)
+	t := v.Type()
+
+	firstField := 
true

+	for i := 0; i < v.NumField(); i++ {
+		fieldValue := v.Field(i).Interface()
+		fieldType := t.Field(i)
+		if !isZeroValue(fieldValue) {
+			if !firstField {
+				sb.WriteString(",")
+			}
+			firstField = false
+			graphQlTagName := fieldType.Tag.Get("graphql")
+			sb.WriteString(fmt.Sprintf("%s:%s", graphQlTagName, url.QueryEscape(fmt.Sprintf("%v", fieldValue))))
+		}
+	}
+
+	sb.WriteString(")")
+	return []byte(sb.String()), nil
+}
+
+// isZeroValue reports whether value is the zero value of its dynamic type.
+// NOTE(review): the previous type-switch compared interface values against
+// the untyped constant 0; that only matches int(0) — int8(0), uint(0),
+// float32(0) etc. compared unequal because interface equality also compares
+// dynamic types, so zero-valued fields of those types were still encoded.
+// reflect.Value.IsZero handles every kind uniformly.
+func isZeroValue(value any) bool {
+	if value == nil {
+		return true
+	}
+	return reflect.ValueOf(value).IsZero()
+}
diff --git a/pkg/linkedingo/page_loader.go b/pkg/linkedingo/page_loader.go
new file mode 100644
index 0000000..cba14cb
--- /dev/null
+++ b/pkg/linkedingo/page_loader.go
@@ -0,0 +1,78 @@
+package linkedingo
+
+import (
+	"fmt"
+	"net/url"
+
+	"github.com/beeper/linkedin/pkg/linkedingo/methods"
+	"github.com/beeper/linkedin/pkg/linkedingo/routing"
+	"github.com/beeper/linkedin/pkg/linkedingo/types"
+)
+
+type CurrentUser struct {
+	FsdProfileID string
+}
+
+func (u *CurrentUser) GetEncodedFsdID() string {
+	return url.QueryEscape(u.FsdProfileID)
+}
+
+type PageLoader struct {
+	client          *Client
+	CurrentUser     *CurrentUser
+	XLiDeviceTrack  *types.DeviceTrack
+	XLiPageInstance string
+	XLiLang         string
+}
+
+func (c *Client) newPageLoader() *PageLoader {
+	return &PageLoader{
+		client:      c,
+		CurrentUser: &CurrentUser{},
+	}
+}
+
+func (pl *PageLoader) LoadMessagesPage() error {
+	messagesDefinition := routing.RequestStoreDefinition[routing.MESSAGES_BASE_URL]
+	headers := pl.client.buildHeaders(messagesDefinition.HeaderOpts)
+	_, respBody, err := pl.client.MakeRequest(string(routing.MESSAGES_BASE_URL), string(messagesDefinition.Method), headers, nil, types.NONE)
+	if err != nil {
+		
return err + } + + mainPageHTML := string(respBody) + + pl.XLiDeviceTrack = pl.ParseDeviceTrackInfo(mainPageHTML) + pl.XLiPageInstance = pl.ParseXLiPageInstance(mainPageHTML) + pl.XLiLang = methods.ParseMetaTagValue(mainPageHTML, "i18nLocale") + + fsdProfileId := methods.ParseFsdProfileID(mainPageHTML) + if fsdProfileId == "" { + return fmt.Errorf("failed to find current user fsd profile id in html response to messaging page") + } + + pl.CurrentUser.FsdProfileID = fsdProfileId + + return nil +} + +func (pl *PageLoader) ParseDeviceTrackInfo(html string) *types.DeviceTrack { + serviceVersion := methods.ParseMetaTagValue(html, "serviceVersion") + return &types.DeviceTrack{ + ClientVersion: serviceVersion, + MpVersion: serviceVersion, + OsName: "web", + TimezoneOffset: 2, + Timezone: "Europe/Stockholm", // TODO scrutinize? + DeviceFormFactor: "DESKTOP", + MpName: "voyager-web", + DisplayDensity: 1.125, + DisplayWidth: 2560.5, + DisplayHeight: 1440, + } +} + +func (pl *PageLoader) ParseXLiPageInstance(html string) string { + clientPageInstanceId := methods.ParseMetaTagValue(html, "clientPageInstanceId") + return "urn:li:page:messaging_index;" + clientPageInstanceId +} diff --git a/pkg/linkedingo/realtime.go b/pkg/linkedingo/realtime.go new file mode 100644 index 0000000..e071f6c --- /dev/null +++ b/pkg/linkedingo/realtime.go @@ -0,0 +1,218 @@ +package linkedingo + +import ( + "bufio" + "context" + "encoding/json" + "errors" + "fmt" + "log" + "net/http" + "strings" + + "github.com/beeper/linkedin/pkg/linkedingo/event" + "github.com/beeper/linkedin/pkg/linkedingo/event/raw" + "github.com/beeper/linkedin/pkg/linkedingo/routing" + "github.com/beeper/linkedin/pkg/linkedingo/routing/response" + "github.com/beeper/linkedin/pkg/linkedingo/types" + + "github.com/google/uuid" +) + +type RealtimeClient struct { + client *Client + http *http.Client + conn *http.Response + cancelFunc context.CancelFunc + sessionID string +} + +func (c *Client) newRealtimeClient() *RealtimeClient { + 
return &RealtimeClient{ + client: c, + http: &http.Client{ + Transport: &http.Transport{ + Proxy: c.httpProxy, + }, + }, + sessionID: uuid.NewString(), + } +} + +func (rc *RealtimeClient) Connect() error { + extraHeaders := map[string]string{ + "accept": string(types.TEXT_EVENTSTREAM), + "x-li-realtime-session": rc.sessionID, + "x-li-recipe-accept": string(types.JSON_LINKEDIN_NORMALIZED), + "x-li-query-accept": string(types.GRAPHQL), + "x-li-accept": string(types.JSON_LINKEDIN_NORMALIZED), + "x-li-recipe-map": `{"inAppAlertsTopic":"com.linkedin.voyager.dash.deco.identity.notifications.InAppAlert-51","professionalEventsTopic":"com.linkedin.voyager.dash.deco.events.ProfessionalEventDetailPage-57","topCardLiveVideoTopic":"com.linkedin.voyager.dash.deco.video.TopCardLiveVideo-9","tabBadgeUpdateTopic":"com.linkedin.voyager.dash.deco.notifications.RealtimeBadgingItemCountsEvent-1"}`, + "x-li-query-map": `{"topicToGraphQLQueryParams":{"conversationsBroadcastTopic":{"queryId":"voyagerMessagingDashMessengerRealtimeDecoration.dc0088938e4fd0220c7694cdc1e7e2f6","variables":{},"extensions":{}},"conversationsTopic":{"queryId":"voyagerMessagingDashMessengerRealtimeDecoration.dc0088938e4fd0220c7694cdc1e7e2f6","variables":{},"extensions":{}},"conversationDeletesBroadcastTopic":{"queryId":"voyagerMessagingDashMessengerRealtimeDecoration.282abe5fa1a242cb76825c32dbbfaede","variables":{},"extensions":{}},"conversationDeletesTopic":{"queryId":"voyagerMessagingDashMessengerRealtimeDecoration.282abe5fa1a242cb76825c32dbbfaede","variables":{},"extensions":{}},"messageReactionSummariesBroadcastTopic":{"queryId":"voyagerMessagingDashMessengerRealtimeDecoration.3173250b03ea4f9f9e138a145cf3d9b4","variables":{},"extensions":{}},"messageReactionSummariesTopic":{"queryId":"voyagerMessagingDashMessengerRealtimeDecoration.3173250b03ea4f9f9e138a145cf3d9b4","variables":{},"extensions":{}},"messageSeenReceiptsBroadcastTopic":{"queryId":"voyagerMessagingDashMessengerRealtimeDecoration.56fd79ca10248ead053
69fa7ab1868dc","variables":{},"extensions":{}},"messageSeenReceiptsTopic":{"queryId":"voyagerMessagingDashMessengerRealtimeDecoration.56fd79ca10248ead05369fa7ab1868dc","variables":{},"extensions":{}},"messagesBroadcastTopic":{"queryId":"voyagerMessagingDashMessengerRealtimeDecoration.9a690a85b608d1212fdaed40be3a1465","variables":{},"extensions":{}},"messagesTopic":{"queryId":"voyagerMessagingDashMessengerRealtimeDecoration.9a690a85b608d1212fdaed40be3a1465","variables":{},"extensions":{}},"replySuggestionBroadcastTopic":{"queryId":"voyagerMessagingDashMessengerRealtimeDecoration.412964c3f7f5a67fb0e56b6bb3a00028","variables":{},"extensions":{}},"replySuggestionTopicV2":{"queryId":"voyagerMessagingDashMessengerRealtimeDecoration.412964c3f7f5a67fb0e56b6bb3a00028","variables":{},"extensions":{}},"typingIndicatorsBroadcastTopic":{"queryId":"voyagerMessagingDashMessengerRealtimeDecoration.ad2174343a09cd7ef53b2e6f633695fe","variables":{},"extensions":{}},"typingIndicatorsTopic":{"queryId":"voyagerMessagingDashMessengerRealtimeDecoration.ad2174343a09cd7ef53b2e6f633695fe","variables":{},"extensions":{}},"messagingSecondaryPreviewBannerTopic":{"queryId":"voyagerMessagingDashRealtimeDecoration.60068248c1f5c683ad2557f7ccfdf188","variables":{},"extensions":{}},"reactionsTopic":{"queryId":"liveVideoVoyagerSocialDashRealtimeDecoration.b8b33dedca7efbe34f1d7e84c3b3aa81","variables":{},"extensions":{}},"commentsTopic":{"queryId":"liveVideoVoyagerSocialDashRealtimeDecoration.c582028e0b04485c17e4324d3f463e11","variables":{},"extensions":{}},"reactionsOnCommentsTopic":{"queryId":"liveVideoVoyagerSocialDashRealtimeDecoration.0a181b05b3751f72ae3eb489b77e3245","variables":{},"extensions":{}},"socialPermissionsPersonalTopic":{"queryId":"liveVideoVoyagerSocialDashRealtimeDecoration.170bf3bfbcca1da322e34f34f37fb954","variables":{},"extensions":{}},"liveVideoPostTopic":{"queryId":"liveVideoVoyagerFeedDashLiveUpdatesRealtimeDecoration.ccc245beb0ba0d99bd1df96a1fc53abc","variables":{},"extensions"
:{}},"generatedJobDescriptionsTopic":{"queryId":"voyagerHiringDashRealtimeDecoration.58501bc70ea8ce6b858527fb1be95007","variables":{},"extensions":{}},"eventToastsTopic":{"queryId":"voyagerEventsDashProfessionalEventsRealtimeResource.6b42abd3511e267e84a6765257deea50","variables":{},"extensions":{}},"coachStreamingResponsesTopic":{"queryId":"voyagerCoachDashGaiRealtimeDecoration.c5707587cf5d95191185235cf15d5129","variables":{},"extensions":{}},"realtimeSearchResultClustersTopic":{"queryId":"voyagerSearchDashRealtimeDecoration.545edd9da8c728b0854505ab6df11870","variables":{},"extensions":{}}}}`, + } + headerOpts := types.HeaderOpts{ + WithCookies: true, + WithCsrfToken: true, + WithXLiTrack: true, + WithXLiPageInstance: true, + WithXLiProtocolVer: true, + Extra: extraHeaders, + Referer: string(routing.MESSAGES_BASE_URL) + "/", + } + headers := rc.client.buildHeaders(headerOpts) + + ctx, cancel := context.WithCancel(context.Background()) + rc.cancelFunc = cancel + + req, err := http.NewRequestWithContext(ctx, "GET", string(routing.REALTIME_CONNECT_URL)+"?rc=1", nil) // ("GET", string(routing.REALTIME_CONNECT_URL) + "?rc=1", nil) + if err != nil { + return err + } + req.Header = headers + + conn, err := rc.http.Do(req) + if err != nil { + return err + } + + if conn.StatusCode != http.StatusOK { + return fmt.Errorf("bad status: %s", conn.Status) + } + + rc.conn = conn + go rc.beginReadStream() + + return nil +} + +func (rc *RealtimeClient) beginReadStream() { + reader := bufio.NewReader(rc.conn.Body) + for { + line, err := reader.ReadString('\n') + if err != nil { + if errors.Is(err, context.Canceled) { // currently only means that Disconnect() was called + break + } + log.Fatalf("error reading from event stream: %s", err.Error()) + } + + line = strings.TrimSpace(line) + if len(line) == 0 { + continue + } + + if strings.HasPrefix(line, "data: ") { + eventDataString := strings.TrimPrefix(line, "data: ") + var eventData map[types.RealtimeEvent]json.RawMessage + err = 
json.Unmarshal([]byte(eventDataString), &eventData) + if err != nil { + log.Printf("error unmarshaling JSON event data: %v\n", err) + continue + } + + rc.processEvents(eventData) + } + } +} + +func (rc *RealtimeClient) Disconnect() error { + if rc.conn == nil { + return fmt.Errorf("realtime client is not connected yet") + } + + if rc.cancelFunc == nil { + return fmt.Errorf("cancel func is somehow nil, can not disconnect real-time client") + } + + rc.cancelFunc() + + rc.conn = nil + rc.cancelFunc = nil + rc.sessionID = uuid.NewString() + + if rc.client.eventHandler != nil { + rc.client.eventHandler(event.ConnectionClosed{ + Reason: types.SELF_DISCONNECT_ISSUED, + }) + } + + return nil +} + +func (rc *RealtimeClient) processEvents(data map[types.RealtimeEvent]json.RawMessage) { + for eventType, eventDataBytes := range data { + switch eventType { + case types.DecoratedEvent: + var decoratedEventResponse raw.DecoratedEventResponse + err := json.Unmarshal(eventDataBytes, &decoratedEventResponse) + if err != nil { + log.Fatalf("failed to unmarshal event bytes with type %s into raw.DecoratedEventResponse", eventType) + } + log.Println(string(eventDataBytes)) + rc.processDecoratedEvent(decoratedEventResponse) + case types.HeartBeat: + log.Println("received heartbeat") + case types.ClientConnectionEvent: + if rc.client.eventHandler != nil { + rc.client.eventHandler(event.ConnectionReady{}) + } + default: + rc.client.Logger.Warn().Str("json_data", string(eventDataBytes)).Str("event_type", string(eventType)).Msg("Received unknown event") + } + } +} + +func (rc *RealtimeClient) processDecoratedEvent(data raw.DecoratedEventResponse) { + var evtData any + topic, topicChunks := parseRealtimeTopic(data.Topic) + switch topic { + case types.MessagesTopic: + renderFormat := data.Payload.Data.DecoratedMessage.Result.MessageBodyRenderFormat + switch renderFormat { + case response.RenderFormatDefault: + evtData = data.Payload.Data.ToMessageEvent() + case response.RenderFormatEdited: + 
evtData = data.Payload.Data.ToMessageEditedEvent() + case response.RenderFormatReCalled: + evtData = data.Payload.Data.ToMessageDeleteEvent() + case response.RenderFormatSystem: + evtData = data.Payload.Data.ToSystemMessageEvent() + default: + rc.client.Logger.Warn().Any("json_data", data.Payload).Str("format", string(renderFormat)).Msg("Received unknown message body render format") + } + case types.MessageReactionSummariesTopic: + evtData = data.Payload.Data.ToMessageReactionEvent() + case types.TypingIndicatorsTopic: + evtData = data.Payload.Data.ToTypingIndicatorEvent() + case types.PresenceStatusTopic: + fsdProfileId := topicChunks[:-0] + log.Println("presence updated for user id:", fsdProfileId) + evtData = data.Payload.ToPresenceStatusUpdateEvent(fsdProfileId[0]) + case types.MessageSeenReceiptsTopic: + evtData = data.Payload.Data.ToMessageSeenEvent() + case types.ConversationsTopic: + evtData = data.Payload.Data.ToThreadUpdateEvent() + case types.ConversationsDeleteTopic: + evtData = data.Payload.Data.ToThreadDeleteEvent() + /* Ignored event topics */ + case types.JobPostingPersonalTopic: + case types.SocialPermissionsPersonalTopic: + case types.MessagingProgressIndicatorTopic: + case types.MessagingDataSyncTopic: + case types.InvitationsTopic: + case types.InAppAlertsTopic: + case types.ReplySuggestionTopicV2: + case types.TabBadgeUpdateTopic: + break + default: + rc.client.Logger.Warn().Any("json_data", data.Payload).Str("event_topic", string(data.Topic)).Msg("Received unknown event topic") + } + + if evtData != nil { + rc.client.eventHandler(evtData) + } +} + +func parseRealtimeTopic(topic string) (types.RealtimeEventTopic, []string) { + topicChunks := strings.Split(topic, ":") + return types.RealtimeEventTopic(topicChunks[2]), topicChunks +} diff --git a/pkg/linkedingo/routing/endpoints.go b/pkg/linkedingo/routing/endpoints.go new file mode 100644 index 0000000..87c408e --- /dev/null +++ b/pkg/linkedingo/routing/endpoints.go @@ -0,0 +1,18 @@ +package 
routing + +const BASE_HOST = "www.linkedin.com" + +type RequestEndpointURL string + +const ( + BASE_URL RequestEndpointURL = "https://" + BASE_HOST + MESSAGES_BASE_URL RequestEndpointURL = BASE_URL + "/messaging" + VOYAGER_GRAPHQL_URL = BASE_URL + "/voyager/api/graphql" + VOYAGER_COMMON_ME_URL = BASE_URL + "/voyager/api/me" + VOYAGER_MESSAGING_GRAPHQL_URL = BASE_URL + "/voyager/api/voyagerMessagingGraphQL/graphql" + VOYAGER_MESSAGING_DASH_MESSENGER_MESSAGES_URL = BASE_URL + "/voyager/api/voyagerMessagingDashMessengerMessages" + VOYAGER_MESSAGING_DASH_MESSENGER_CONVERSATIONS_URL = BASE_URL + "/voyager/api/voyagerMessagingDashMessengerConversations" + VOYAGER_MEDIA_UPLOAD_METADATA_URL = BASE_URL + "/voyager/api/voyagerVideoDashMediaUploadMetadata" + REALTIME_CONNECT_URL = BASE_URL + "/realtime/connect" + LOGOUT_URL = BASE_URL + "/uas/logout" +) diff --git a/pkg/linkedingo/routing/payload/graphql.go b/pkg/linkedingo/routing/payload/graphql.go new file mode 100644 index 0000000..20c1c84 --- /dev/null +++ b/pkg/linkedingo/routing/payload/graphql.go @@ -0,0 +1,27 @@ +package payload + +import "encoding/json" + +type GraphQLPatchBody struct { + Patch Patch `json:"patch,omitempty"` +} + +func (p GraphQLPatchBody) Encode() ([]byte, error) { + return json.Marshal(p) +} + +type Set struct { + Body any `json:"body,omitempty"` +} + +type Patch struct { + Set any `json:"$set,omitempty"` +} + +type PatchEntitiesPayload struct { + Entities map[string]GraphQLPatchBody `json:"entities,omitempty"` +} + +func (p PatchEntitiesPayload) Encode() ([]byte, error) { + return json.Marshal(p) +} diff --git a/pkg/linkedingo/routing/payload/media.go b/pkg/linkedingo/routing/payload/media.go new file mode 100644 index 0000000..3848cb2 --- /dev/null +++ b/pkg/linkedingo/routing/payload/media.go @@ -0,0 +1,20 @@ +package payload + +import "encoding/json" + +type MediaUploadType string + +const ( + MESSAGING_PHOTO_ATTACHMENT MediaUploadType = "MESSAGING_PHOTO_ATTACHMENT" + MESSAGING_FILE_ATTACHMENT 
MediaUploadType = "MESSAGING_FILE_ATTACHMENT" +) + +type UploadMediaMetadataPayload struct { + MediaUploadType MediaUploadType `json:"mediaUploadType,omitempty"` + FileSize int `json:"fileSize,omitempty"` + Filename string `json:"filename,omitempty"` +} + +func (p UploadMediaMetadataPayload) Encode() ([]byte, error) { + return json.Marshal(p) +} diff --git a/pkg/linkedingo/routing/payload/messaging.go b/pkg/linkedingo/routing/payload/messaging.go new file mode 100644 index 0000000..30e56cf --- /dev/null +++ b/pkg/linkedingo/routing/payload/messaging.go @@ -0,0 +1,236 @@ +package payload + +import ( + "encoding/json" + + "github.com/beeper/linkedin/pkg/linkedingo/types" +) + +type SendMessagePayload struct { + Message SendMessageData `json:"message,omitempty"` + MailboxUrn string `json:"mailboxUrn,omitempty"` + TrackingID string `json:"trackingId,omitempty"` + DedupeByClientGeneratedToken bool `json:"dedupeByClientGeneratedToken"` + HostRecipientUrns []string `json:"hostRecipientUrns,omitempty"` + ConversationTitle string `json:"conversationTitle,omitempty"` +} + +func (p SendMessagePayload) Encode() ([]byte, error) { + return json.Marshal(p) +} + +type SendMessageData struct { + Body MessageBody `json:"body,omitempty"` + RenderContentUnions []RenderContent `json:"renderContentUnions,omitempty"` + ConversationUrn string `json:"conversationUrn,omitempty"` + OriginToken string `json:"originToken,omitempty"` +} + +type AttributeBold struct { + Typename string `json:"__typename"` + Type string `json:"_type"` + RecipeType string `json:"_recipeType"` +} + +type AttributeKind struct { + Hyperlink any `json:"hyperlink"` + ListItem any `json:"listItem"` + Paragraph any `json:"paragraph"` + LineBreak any `json:"lineBreak"` + Subscript any `json:"subscript"` + Underline any `json:"underline"` + Superscript any `json:"superscript"` + Bold AttributeBold `json:"bold"` + List any `json:"list"` + Italic any `json:"italic"` + Entity any `json:"entity"` +} + +type Attributes struct { 
+ Start int `json:"start"` + Length int `json:"length"` + Type string `json:"_type"` + RecipeType string `json:"_recipeType"` + AttributeKind AttributeKind `json:"attributeKind"` +} + +type MessageBody struct { + Type string `json:"_type,omitempty"` + Attributes []Attributes `json:"attributes,omitempty"` + Text string `json:"text"` + RecipeType string `json:"_recipeType,omitempty"` +} + +type StartTypingPayload struct { + ConversationUrn string `json:"conversationUrn,omitempty"` +} + +func (p StartTypingPayload) Encode() ([]byte, error) { + return json.Marshal(p) +} + +type DeleteMessagePayload struct { + MessageUrn string `json:"messageUrn,omitempty"` +} + +func (p DeleteMessagePayload) Encode() ([]byte, error) { + return json.Marshal(p) +} + +type RenderContent struct { + Audio *Audio `json:"audio,omitempty"` + AwayMessage any `json:"awayMessage,omitempty"` + ConversationAdsMessageContent any `json:"conversationAdsMessageContent,omitempty"` + ExternalMedia *ExternalMedia `json:"externalMedia,omitempty"` + File *File `json:"file,omitempty"` + ForwardedMessageContent *ForwardedMessageContent `json:"forwardedMessageContent,omitempty"` + HostUrnData any `json:"hostUrnData,omitempty"` + MessageAdRenderContent any `json:"messageAdRenderContent,omitempty"` + RepliedMessageContent *RepliedMessageContent `json:"repliedMessageContent,omitempty"` + UnavailableContent any `json:"unavailableContent,omitempty"` + VectorImage *VectorImage `json:"vectorImage,omitempty"` + Video *Video `json:"video,omitempty"` + VideoMeeting any `json:"videoMeeting,omitempty"` +} + +type Video struct { + Thumbnail struct { + DigitalMediaAsset any `json:"digitalmediaAsset,omitempty"` + Type string `json:"_type,omitempty"` + Attribution any `json:"attribution,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + FocalPoint any `json:"focalPoint,omitempty"` + Artifacts []struct { + Width int `json:"width,omitempty"` + Type string `json:"_type,omitempty"` + RecipeType string 
`json:"_recipeType,omitempty"` + FileIdentifyingUrlPathSegment string `json:"fileIdentifyingUrlPathSegment,omitempty"` + Height int `json:"height,omitempty"` + } `json:"artifacts,omitempty"` + RootUrl string `json:"rootUrl,omitempty"` + } `json:"thumbnail,omitempty"` + ProgressiveStreams []struct { + StreamingLocations []struct { + Type string `json:"_type,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + Url string `json:"url,omitempty"` + ExpiresAt any `json:"expiresAt,omitempty"` + } `json:"streamingLocations,omitempty"` + Size int `json:"size,omitempty"` + BitRate int `json:"bitRate,omitempty"` + Width int `json:"width,omitempty"` + Type string `json:"_type,omitempty"` + MediaType string `json:"mediaType,omitempty"` + MimeType any `json:"mimeType,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + Height int `json:"height,omitempty"` + } `json:"progressiveStreams,omitempty"` + LiveStreamCreatedAt any `json:"liveStreamCreatedAt,omitempty"` + Transcripts []any `json:"transcripts,omitempty"` + PrevMedia any `json:"prevMedia,omitempty"` + Type string `json:"_type,omitempty"` + AspectRatio float64 `json:"aspectRatio,omitempty"` + Media string `json:"media,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + AdaptiveStreams []any `json:"adaptiveStreams,omitempty"` + LiveStreamEndedAt any `json:"liveStreamEndedAt,omitempty"` + Duration int `json:"duration,omitempty"` + EntityUrn string `json:"entityUrn,omitempty"` + Provider string `json:"provider,omitempty"` + NextMedia any `json:"nextMedia,omitempty"` + TrackingId string `json:"trackingId,omitempty"` +} + +type RepliedMessageContent struct { + OriginalSenderUrn string `json:"originalSenderUrn,omitempty"` + OriginalSendAt int64 `json:"originalSendAt,omitempty"` + OriginalMessageUrn string `json:"originalMessageUrn,omitempty"` + MessageBody MessageBody `json:"messageBody,omitempty"` +} + +type ForwardedMessageContent struct { + RecipeType string `json:"_recipeType,omitempty"` + 
Type string `json:"_type,omitempty"` + FooterText FooterText `json:"footerText,omitempty"` + ForwardedBody ForwardedBody `json:"forwardedBody,omitempty"` + OriginalSendAt int64 `json:"originalSendAt,omitempty"` + OriginalSender types.ConversationParticipant `json:"originalSender,omitempty"` +} + +type FooterText struct { + RecipeType string `json:"_recipeType,omitempty"` + Type string `json:"_type,omitempty"` + Attributes []any `json:"attributes,omitempty"` + Text string `json:"text,omitempty"` +} + +type ForwardedBody struct { + RecipeType string `json:"_recipeType,omitempty"` + Type string `json:"_type,omitempty"` + Attributes []any `json:"attributes,omitempty"` + Text string `json:"text,omitempty"` +} + +type Audio struct { + Duration int `json:"duration,omitempty"` + Type string `json:"_type,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + URL string `json:"url,omitempty"` +} + +type VectorImage struct { + DigitalmediaAsset string `json:"digitalmediaAsset,omitempty"` + Type string `json:"_type,omitempty"` + Attribution any `json:"attribution,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + FocalPoint any `json:"focalPoint,omitempty"` + RootURL string `json:"rootUrl,omitempty"` + Artifacts []any `json:"artifacts,omitempty"` +} + +type Media struct { + Type string `json:"_type,omitempty"` + OriginalHeight int `json:"originalHeight,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + OriginalWidth int `json:"originalWidth,omitempty"` + URL string `json:"url,omitempty"` +} +type PreviewMedia struct { + Type string `json:"_type,omitempty"` + OriginalHeight int `json:"originalHeight,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + OriginalWidth int `json:"originalWidth,omitempty"` + URL string `json:"url,omitempty"` +} + +type ExternalMedia struct { + Type string `json:"_type,omitempty"` + Media Media `json:"media,omitempty"` + Title string `json:"title,omitempty"` + RecipeType string 
`json:"_recipeType,omitempty"` + EntityUrn string `json:"entityUrn,omitempty"` + PreviewMedia PreviewMedia `json:"previewMedia,omitempty"` +} + +type File struct { + AssetUrn string `json:"assetUrn,omitempty"` + ByteSize int `json:"byteSize,omitempty"` + MediaType types.ContentType `json:"mediaType,omitempty"` + Name string `json:"name,omitempty"` + URL string `json:"url,omitempty"` + Type string `json:"type,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` +} + +type MarkThreadReadBody struct { + Read bool `json:"read"` +} + +type SendReactionPayload struct { + MessageUrn string `json:"messageUrn,omitempty"` + Emoji string `json:"emoji,omitempty"` +} + +func (p SendReactionPayload) Encode() ([]byte, error) { + return json.Marshal(p) +} diff --git a/pkg/linkedingo/routing/query/graphql.go b/pkg/linkedingo/routing/query/graphql.go new file mode 100644 index 0000000..b9211b5 --- /dev/null +++ b/pkg/linkedingo/routing/query/graphql.go @@ -0,0 +1,17 @@ +package query + +import ( + "fmt" + + "github.com/beeper/linkedin/pkg/linkedingo/types" +) + +type GraphQLQuery struct { + IncludeWebMetadata bool `url:"includeWebMetadata,omitempty"` + QueryID types.GraphQLQueryIDs `url:"queryId,omitempty"` + Variables string `url:"variables,omitempty"` +} + +func (q *GraphQLQuery) Encode() ([]byte, error) { + return []byte(fmt.Sprintf("queryId=%s&variables=%s", q.QueryID, q.Variables)), nil +} diff --git a/pkg/linkedingo/routing/query/logout.go b/pkg/linkedingo/routing/query/logout.go new file mode 100644 index 0000000..15b859b --- /dev/null +++ b/pkg/linkedingo/routing/query/logout.go @@ -0,0 +1,17 @@ +package query + +import ( + "github.com/google/go-querystring/query" +) + +type LogoutQuery struct { + CsrfToken string `url:"csrfToken"` +} + +func (p *LogoutQuery) Encode() ([]byte, error) { + values, err := query.Values(p) + if err != nil { + return nil, err + } + return []byte(values.Encode()), nil +} diff --git a/pkg/linkedingo/routing/query/messaging.go 
b/pkg/linkedingo/routing/query/messaging.go new file mode 100644 index 0000000..6887f58 --- /dev/null +++ b/pkg/linkedingo/routing/query/messaging.go @@ -0,0 +1,68 @@ +package query + +import ( + "github.com/beeper/linkedin/pkg/linkedingo/methods" +) + +type Action string + +const ( + ACTION_CREATE_MESSAGE Action = "createMessage" + ACTION_TYPING Action = "typing" + ACTION_UPLOAD Action = "upload" + ACTION_RECALL Action = "recall" + ACTION_REACT_WITH_EMOJI Action = "reactWithEmoji" + ACTION_UNREACT_WITH_EMOJI Action = "unreactWithEmoji" +) + +type DoActionQuery struct { + Action Action `url:"action"` +} + +func (q DoActionQuery) Encode() ([]byte, error) { + return []byte("action=" + q.Action), nil +} + +type InboxCategory string + +const ( + INBOX_CATEGORY_OTHER InboxCategory = "OTHER" + INBOX_CATEGORY_ARCHIVE InboxCategory = "ARCHIVE" + INBOX_CATEGORY_INBOX InboxCategory = "INBOX" + INBOX_CATEGORY_PRIMARY InboxCategory = "PRIMARY_INBOX" + INBOX_CATEGORY_SECONDARY InboxCategory = "SECONDARY_INBOX" +) + +type GetThreadsVariables struct { + InboxCategory InboxCategory `graphql:"category"` + Count int64 `graphql:"count"` + MailboxUrn string `graphql:"mailboxUrn"` + LastUpdatedBefore int64 `graphql:"lastUpdatedBefore"` + NextCursor string `graphql:"nextCursor"` +} + +func (q GetThreadsVariables) Encode() ([]byte, error) { + return methods.EncodeGraphQLQuery(q) +} + +type FetchMessagesVariables struct { + DeliveredAt int64 `graphql:"deliveredAt"` + ConversationUrn string `graphql:"conversationUrn"` + Count int64 `graphql:"count"` + PrevCursor string `graphql:"prevCursor"` + CountBefore int64 `graphql:"countBefore"` + CountAfter int64 `graphql:"countAfter"` +} + +func (q FetchMessagesVariables) Encode() ([]byte, error) { + return methods.EncodeGraphQLQuery(q) +} + +type GetReactionsForEmojiVariables struct { + Emoji string `graphql:"emoji"` + MessageUrn string `graphql:"messageUrn"` +} + +func (q GetReactionsForEmojiVariables) Encode() ([]byte, error) { + return 
methods.EncodeGraphQLQuery(q) +} diff --git a/pkg/linkedingo/routing/response/graphql.go b/pkg/linkedingo/routing/response/graphql.go new file mode 100644 index 0000000..8d46654 --- /dev/null +++ b/pkg/linkedingo/routing/response/graphql.go @@ -0,0 +1,24 @@ +package response + +import "encoding/json" + +type GraphQlResponse struct { + Data GraphQLData `json:"data,omitempty"` +} + +type GraphQLData struct { + RecipeType string `json:"_recipeType,omitempty"` + Type string `json:"_type,omitempty"` + MessengerConversationsBySyncToken *MessengerConversationsResponse `json:"messengerConversationsBySyncToken,omitempty"` + MessengerConversationsByCategory *MessengerConversationsResponse `json:"messengerConversationsByCategory,omitempty"` + + MessengerMessagesBySyncToken *MessengerMessagesResponse `json:"messengerMessagesBySyncToken,omitempty"` + MessengerMessagesByAnchorTimestamp *MessengerMessagesResponse `json:"messengerMessagesByAnchorTimestamp,omitempty"` + MessengerMessagesByConversation *MessengerMessagesResponse `json:"messengerMessagesByConversation,omitempty"` + MessengerMessagingParticipantsByMessageAndEmoji *MessengerMessagingParticipantsByMessageAndEmojiResponse `json:"messengerMessagingParticipantsByMessageAndEmoji,omitempty"` +} + +func (r GraphQlResponse) Decode(data []byte) (any, error) { + respData := &GraphQlResponse{} + return respData, json.Unmarshal(data, &respData) +} diff --git a/pkg/linkedingo/routing/response/media.go b/pkg/linkedingo/routing/response/media.go new file mode 100644 index 0000000..9d5a431 --- /dev/null +++ b/pkg/linkedingo/routing/response/media.go @@ -0,0 +1,31 @@ +package response + +import "encoding/json" + +type UploadMediaMetadataResponse struct { + Data MediaMetadataValue `json:"data,omitempty"` + Included []any `json:"included,omitempty"` +} + +func (r UploadMediaMetadataResponse) Decode(data []byte) (any, error) { + respData := &UploadMediaMetadataResponse{} + return respData, json.Unmarshal(data, &respData) +} + +type 
SingleUploadHeaders struct { +} +type MediaMetadata struct { + Urn string `json:"urn,omitempty"` + MediaArtifactUrn string `json:"mediaArtifactUrn,omitempty"` + Recipes []string `json:"recipes,omitempty"` + SingleUploadHeaders SingleUploadHeaders `json:"singleUploadHeaders,omitempty"` + AssetRealtimeTopic string `json:"assetRealtimeTopic,omitempty"` + PollingURL string `json:"pollingUrl,omitempty"` + SingleUploadURL string `json:"singleUploadUrl,omitempty"` + Type string `json:"type,omitempty"` + Type0 string `json:"$type,omitempty"` +} +type MediaMetadataValue struct { + Value MediaMetadata `json:"value,omitempty"` + Type string `json:"$type,omitempty"` +} diff --git a/pkg/linkedingo/routing/response/messaging.go b/pkg/linkedingo/routing/response/messaging.go new file mode 100644 index 0000000..7ff414c --- /dev/null +++ b/pkg/linkedingo/routing/response/messaging.go @@ -0,0 +1,218 @@ +package response + +import ( + "encoding/json" + + "github.com/beeper/linkedin/pkg/linkedingo/routing/payload" + "github.com/beeper/linkedin/pkg/linkedingo/routing/query" + "github.com/beeper/linkedin/pkg/linkedingo/types" +) + +type Metadata struct { + NextCursor string `json:"nextCursor,omitempty"` + PrevCursor string `json:"prevCursor,omitempty"` + NewSyncToken string `json:"newSyncToken,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + Type string `json:"_type,omitempty"` + ShouldClearCache bool `json:"shouldClearCache,omitempty"` + DeletedUrns []any `json:"deletedUrns,omitempty"` +} + +type DisabledFeatures struct { + Type string `json:"_type,omitempty"` + DisabledFeature string `json:"disabledFeature,omitempty"` + ReasonText any `json:"reasonText,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` +} + +type Creator struct { + HostIdentityUrn string `json:"hostIdentityUrn,omitempty"` + Preview any `json:"preview,omitempty"` + EntityUrn string `json:"entityUrn,omitempty"` + ShowPremiumInBug bool `json:"showPremiumInBug,omitempty"` + 
ShowVerificationBadge bool `json:"showVerificationBadge,omitempty"` + Type string `json:"_type,omitempty"` + ParticipantType types.ParticipantType `json:"participantType,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + BackendUrn string `json:"backendUrn,omitempty"` +} + +type Sender struct { + HostIdentityUrn string `json:"hostIdentityUrn,omitempty"` + Type string `json:"_type,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + EntityUrn string `json:"entityUrn,omitempty"` + ShowPremiumInBug bool `json:"showPremiumInBug,omitempty"` + ShowVerificationBadge bool `json:"showVerificationBadge,omitempty"` +} + +type Conversation struct { + RecipeType string `json:"_recipeType,omitempty"` + Type string `json:"_type,omitempty"` + EntityUrn string `json:"entityUrn,omitempty"` +} + +type MessageBodyRenderFormat string + +const ( + RenderFormatDefault MessageBodyRenderFormat = "DEFAULT" + RenderFormatEdited MessageBodyRenderFormat = "EDITED" + RenderFormatReCalled MessageBodyRenderFormat = "RECALLED" + RenderFormatSystem MessageBodyRenderFormat = "SYSTEM" +) + +type MessageElement struct { + ReactionSummaries []ReactionSummary `json:"reactionSummaries,omitempty"` + Footer any `json:"footer,omitempty"` + Subject any `json:"subject,omitempty"` + Type string `json:"_type,omitempty"` + InlineWarning any `json:"inlineWarning,omitempty"` + Body payload.MessageBody `json:"body,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + OriginToken string `json:"originToken,omitempty"` + BackendUrn string `json:"backendUrn,omitempty"` + DeliveredAt int64 `json:"deliveredAt,omitempty"` + Actor types.ConversationParticipant `json:"actor,omitempty"` + RenderContentFallbackText any `json:"renderContentFallbackText,omitempty"` + EntityUrn string `json:"entityUrn,omitempty"` + Sender types.ConversationParticipant `json:"sender,omitempty"` + BackendConversationUrn string `json:"backendConversationUrn,omitempty"` + IncompleteRetriableData bool 
`json:"incompleteRetriableData,omitempty"` + MessageBodyRenderFormat MessageBodyRenderFormat `json:"messageBodyRenderFormat,omitempty"` + RenderContent []payload.RenderContent `json:"renderContent,omitempty"` + Conversation Conversation `json:"conversation,omitempty"` + PreviousMessages Messages `json:"previousMessages,omitempty"` +} + +type Messages struct { + Type string `json:"_type,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + Messages []MessageElement `json:"elements,omitempty"` +} + +type ThreadElement struct { + NotificationStatus string `json:"notificationStatus,omitempty"` + ConversationParticipants []types.ConversationParticipant `json:"conversationParticipants,omitempty"` + UnreadCount int `json:"unreadCount,omitempty"` + ConversationVerificationLabel any `json:"conversationVerificationLabel,omitempty"` + LastActivityAt int64 `json:"lastActivityAt,omitempty"` + DescriptionText any `json:"descriptionText,omitempty"` + ConversationVerificationExplanation any `json:"conversationVerificationExplanation,omitempty"` + Title string `json:"title,omitempty"` + BackendUrn string `json:"backendUrn,omitempty"` + ShortHeadlineText any `json:"shortHeadlineText,omitempty"` + CreatedAt int64 `json:"createdAt,omitempty"` + LastReadAt int64 `json:"lastReadAt,omitempty"` + HostConversationActions []any `json:"hostConversationActions,omitempty"` + EntityUrn string `json:"entityUrn,omitempty"` + Categories []query.InboxCategory `json:"categories,omitempty"` + State any `json:"state,omitempty"` + DisabledFeatures []DisabledFeatures `json:"disabledFeatures,omitempty"` + Creator Creator `json:"creator,omitempty"` + Read bool `json:"read,omitempty"` + GroupChat bool `json:"groupChat,omitempty"` + Type string `json:"_type,omitempty"` + ContentMetadata any `json:"contentMetadata,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + ConversationURL string `json:"conversationUrl,omitempty"` + HeadlineText any `json:"headlineText,omitempty"` + 
IncompleteRetriableData bool `json:"incompleteRetriableData,omitempty"` + MessageElements Messages `json:"messages,omitempty"` + ConversationTypeText any `json:"conversationTypeText,omitempty"` +} + +type MessageSeenReceipt struct { + Type string `json:"_type,omitempty"` + SeenAt int64 `json:"seenAt,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + Message MessageReceiptData `json:"message,omitempty"` + SeenByParticipant types.ConversationParticipant `json:"seenByParticipant,omitempty"` +} + +type MessageReceiptData struct { + RecipeType string `json:"_recipeType,omitempty"` + Type string `json:"_type,omitempty"` + EntityUrn string `json:"entityUrn,omitempty"` +} + +type MessengerConversationsResponse struct { + Type string `json:"_type,omitempty"` + Metadata Metadata `json:"metadata,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + Threads []ThreadElement `json:"elements,omitempty"` +} + +type MessengerMessagesResponse struct { + Type string `json:"_type,omitempty"` + Metadata Metadata `json:"metadata,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + Messages []MessageElement `json:"elements,omitempty"` +} + +type MessengerMessagingParticipantsByMessageAndEmojiResponse struct { + Type string `json:"_type,omitempty"` + Metadata Metadata `json:"metadata,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + Participants []types.ConversationParticipant `json:"elements,omitempty"` +} + +type TypingIndicator struct { + Type string `json:"_type,omitempty"` + TypingParticipant types.ConversationParticipant `json:"typingParticipant,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + Conversation Conversation `json:"conversation,omitempty"` +} + +type ShortMessageElement struct { + RecipeType string `json:"_recipeType,omitempty"` + Type string `json:"_type,omitempty"` + EntityUrn string `json:"entityUrn,omitempty"` +} + +type ReactionSummary struct { + Count int `json:"count,omitempty"` + Type string 
`json:"_type,omitempty"` + FirstReactedAt int64 `json:"firstReactedAt,omitempty"` + Emoji string `json:"emoji,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + ViewerReacted bool `json:"viewerReacted,omitempty"` +} + +type MessageReaction struct { + ReactionAdded bool `json:"reactionAdded,omitempty"` + Type string `json:"_type,omitempty"` + Actor types.ConversationParticipant `json:"actor,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + Message ShortMessageElement `json:"message,omitempty"` + ReactionSummary ReactionSummary `json:"reactionSummary,omitempty"` +} + +type MessageSentResponse struct { + Data MessageSentData `json:"value,omitempty"` +} + +func (r MessageSentResponse) Decode(data []byte) (any, error) { + respData := &MessageSentResponse{} + return respData, json.Unmarshal(data, &respData) +} + +type MessageSentData struct { + RenderContentUnions []payload.RenderContent `json:"renderContentUnions,omitempty"` + EntityUrn string `json:"entityUrn,omitempty"` + BackendConversationUrn string `json:"backendConversationUrn,omitempty"` + SenderUrn string `json:"senderUrn,omitempty"` + OriginToken string `json:"originToken,omitempty"` + Body payload.MessageBody `json:"body,omitempty"` + BackendUrn string `json:"backendUrn,omitempty"` + ConversationUrn string `json:"conversationUrn,omitempty"` + DeliveredAt int64 `json:"deliveredAt,omitempty"` +} + +type MarkThreadReadResponse struct { + Results map[string]MarkThreadReadResult `json:"results,omitempty"` + Errors any `json:"errors,omitempty"` +} + +type MarkThreadReadResult struct { + Status int `json:"status,omitempty"` +} diff --git a/pkg/linkedingo/routing/store.go b/pkg/linkedingo/routing/store.go new file mode 100644 index 0000000..578028d --- /dev/null +++ b/pkg/linkedingo/routing/store.go @@ -0,0 +1,110 @@ +package routing + +import ( + "github.com/beeper/linkedin/pkg/linkedingo/routing/response" + "github.com/beeper/linkedin/pkg/linkedingo/types" +) + +type PayloadDataInterface 
interface { + Encode() ([]byte, error) +} + +type ResponseDataInterface interface { + Decode(data []byte) (any, error) +} + +type RequestEndpointInfo struct { + Method string + HeaderOpts types.HeaderOpts + ContentType types.ContentType + ResponseDefinition ResponseDataInterface +} + +var RequestStoreDefinition = map[RequestEndpointURL]RequestEndpointInfo{ + MESSAGES_BASE_URL: { + Method: "GET", + ContentType: types.NONE, + HeaderOpts: types.HeaderOpts{ + WithCookies: true, + Extra: map[string]string{ + "Sec-Fetch-Dest": "document", + "Sec-Fetch-Mode": "navigate", + "Sec-Fetch-Site": "none", + "Sec-Fetch-User": "?1", + "Upgrade-Insecure-Requests": "1", + }, + }, + }, + VOYAGER_MESSAGING_GRAPHQL_URL: { + Method: "GET", + ContentType: types.NONE, + HeaderOpts: types.HeaderOpts{ + WithCookies: true, + WithCsrfToken: true, + WithXLiTrack: true, + WithXLiPageInstance: true, + WithXLiProtocolVer: true, + Referer: string(MESSAGES_BASE_URL) + "/", + Extra: map[string]string{ + "accept": string(types.GRAPHQL), + }, + }, + ResponseDefinition: response.GraphQlResponse{}, + }, + VOYAGER_MESSAGING_DASH_MESSENGER_MESSAGES_URL: { + Method: "POST", + ContentType: types.PLAINTEXT_UTF8, + HeaderOpts: types.HeaderOpts{ + WithCookies: true, + WithCsrfToken: true, + WithXLiLang: true, + WithXLiPageInstance: true, + WithXLiTrack: true, + WithXLiProtocolVer: true, + Origin: string(BASE_URL), + Extra: map[string]string{ + "accept": string(types.JSON), + }, + }, + ResponseDefinition: response.MessageSentResponse{}, + }, + VOYAGER_MESSAGING_DASH_MESSENGER_CONVERSATIONS_URL: { + Method: "POST", + ContentType: types.PLAINTEXT_UTF8, + HeaderOpts: types.HeaderOpts{ + WithCookies: true, + WithCsrfToken: true, + WithXLiTrack: true, + WithXLiPageInstance: true, + WithXLiProtocolVer: true, + WithXLiLang: true, + Origin: string(BASE_URL), + Extra: map[string]string{ + "accept": string(types.JSON), + }, + }, + }, + VOYAGER_MEDIA_UPLOAD_METADATA_URL: { + Method: "POST", + ContentType: 
types.JSON_PLAINTEXT_UTF8, + HeaderOpts: types.HeaderOpts{ + WithCookies: true, + WithCsrfToken: true, + WithXLiTrack: true, + WithXLiPageInstance: true, + WithXLiProtocolVer: true, + WithXLiLang: true, + Extra: map[string]string{ + "accept": string(types.JSON_LINKEDIN_NORMALIZED), + }, + }, + ResponseDefinition: response.UploadMediaMetadataResponse{}, + }, + LOGOUT_URL: { + Method: "GET", + ContentType: types.NONE, + HeaderOpts: types.HeaderOpts{ + WithCookies: true, + }, + }, +} diff --git a/pkg/linkedingo/test_data/testimage1.jpg b/pkg/linkedingo/test_data/testimage1.jpg new file mode 100644 index 0000000..f64c198 Binary files /dev/null and b/pkg/linkedingo/test_data/testimage1.jpg differ diff --git a/pkg/linkedingo/test_data/testvideo1.mp4 b/pkg/linkedingo/test_data/testvideo1.mp4 new file mode 100644 index 0000000..cb2c5cd Binary files /dev/null and b/pkg/linkedingo/test_data/testvideo1.mp4 differ diff --git a/pkg/linkedingo/types/conversations.go b/pkg/linkedingo/types/conversations.go new file mode 100644 index 0000000..91ec018 --- /dev/null +++ b/pkg/linkedingo/types/conversations.go @@ -0,0 +1,56 @@ +package types + +type ConversationParticipant struct { + HostIdentityUrn string `json:"hostIdentityUrn,omitempty"` + Preview any `json:"preview,omitempty"` + EntityUrn string `json:"entityUrn,omitempty"` + ShowPremiumInBug bool `json:"showPremiumInBug,omitempty"` + ShowVerificationBadge bool `json:"showVerificationBadge,omitempty"` + Type string `json:"_type,omitempty"` + ParticipantType ParticipantType `json:"participantType,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + BackendUrn string `json:"backendUrn,omitempty"` +} + +type FirstName struct { + Type string `json:"_type,omitempty"` + Attributes []any `json:"attributes,omitempty"` + Text string `json:"text,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` +} + +type LastName struct { + Type string `json:"_type,omitempty"` + Attributes []any `json:"attributes,omitempty"` + 
Text string `json:"text,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` +} + +type Headline struct { + Type string `json:"_type,omitempty"` + Attributes []any `json:"attributes,omitempty"` + Text string `json:"text,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` +} + +type Member struct { + ProfileURL string `json:"profileUrl,omitempty"` + FirstName FirstName `json:"firstName,omitempty"` + LastName LastName `json:"lastName,omitempty"` + ProfilePicture ProfilePicture `json:"profilePicture,omitempty"` + Distance string `json:"distance,omitempty"` + Pronoun any `json:"pronoun,omitempty"` + Type string `json:"_type,omitempty"` + RecipeType string `json:"_recipeType,omitempty"` + Headline Headline `json:"headline,omitempty"` +} + +type ParticipantType struct { + Member Member `json:"member,omitempty"` + Custom any `json:"custom,omitempty"` + Organization any `json:"organization,omitempty"` +} + +type ProfilePicture struct { + RootURL string `json:"rootUrl,omitempty"` +} diff --git a/pkg/linkedingo/types/device.go b/pkg/linkedingo/types/device.go new file mode 100644 index 0000000..38934f3 --- /dev/null +++ b/pkg/linkedingo/types/device.go @@ -0,0 +1,20 @@ +package types + +import "encoding/json" + +type DeviceTrack struct { + ClientVersion string `json:"clientVersion,omitempty"` + MpVersion string `json:"mpVersion,omitempty"` + OsName string `json:"osName,omitempty"` + TimezoneOffset int `json:"timezoneOffset,omitempty"` + Timezone string `json:"timezone,omitempty"` + DeviceFormFactor string `json:"deviceFormFactor,omitempty"` + MpName string `json:"mpName,omitempty"` + DisplayDensity float64 `json:"displayDensity,omitempty"` + DisplayWidth float64 `json:"displayWidth,omitempty"` + DisplayHeight int `json:"displayHeight,omitempty"` +} + +func (dt *DeviceTrack) Encode() ([]byte, error) { + return json.Marshal(dt) +} diff --git a/pkg/linkedingo/types/events.go b/pkg/linkedingo/types/events.go new file mode 100644 index 0000000..49ab735 --- 
/dev/null +++ b/pkg/linkedingo/types/events.go @@ -0,0 +1,58 @@ +package types + +type RealtimeEvent string + +const ( + ClientConnectionEvent RealtimeEvent = "com.linkedin.realtimefrontend.ClientConnection" + DecoratedEvent RealtimeEvent = "com.linkedin.realtimefrontend.DecoratedEvent" + HeartBeat RealtimeEvent = "com.linkedin.realtimefrontend.Heartbeat" +) + +type RealtimeEventTopic string + +const ( + ConversationsTopic RealtimeEventTopic = "conversationsTopic" + ConversationsDeleteTopic RealtimeEventTopic = "conversationDeletesTopic" + MessageSeenReceiptsTopic RealtimeEventTopic = "messageSeenReceiptsTopic" + MessagesTopic RealtimeEventTopic = "messagesTopic" + ReplySuggestionTopicV2 RealtimeEventTopic = "replySuggestionTopicV2" + TabBadgeUpdateTopic RealtimeEventTopic = "tabBadgeUpdateTopic" + TypingIndicatorsTopic RealtimeEventTopic = "typingIndicatorsTopic" + InvitationsTopic RealtimeEventTopic = "invitationsTopic" + InAppAlertsTopic RealtimeEventTopic = "inAppAlertsTopic" + MessageReactionSummariesTopic RealtimeEventTopic = "messageReactionSummariesTopic" + SocialPermissionsPersonalTopic RealtimeEventTopic = "socialPermissionsPersonalTopic" + JobPostingPersonalTopic RealtimeEventTopic = "jobPostingPersonalTopic" + MessagingProgressIndicatorTopic RealtimeEventTopic = "messagingProgressIndicatorTopic" + MessagingDataSyncTopic RealtimeEventTopic = "messagingDataSyncTopic" + PresenceStatusTopic RealtimeEventTopic = "presenceStatusTopic" +) + +type LinkedInAPIType string + +const ( + MiniProfile LinkedInAPIType = "com.linkedin.voyager.identity.shared.MiniProfile" + Conversation LinkedInAPIType = "com.linkedin.voyager.messaging.Conversation" + MessagingMember LinkedInAPIType = "com.linkedin.voyager.messaging.MessagingMember" + Event LinkedInAPIType = "com.linkedin.voyager.messaging.Event" +) + +type RealtimeEventType string + +const ( + MessageEvent RealtimeEventType = "com.linkedin.voyager.messaging.event.MessageEvent" +) + +type PresenceAvailabilityStatus 
string + +const ( + Online PresenceAvailabilityStatus = "ONLINE" + Offline PresenceAvailabilityStatus = "OFFLINE" +) + +type ConnectionClosedReason string + +const ( + SELF_DISCONNECT_ISSUED ConnectionClosedReason = "client called Disconnect() method" + CONNECTION_DROPPED ConnectionClosedReason = "real-time client lost connection to the server" +) diff --git a/pkg/linkedingo/types/graphql.go b/pkg/linkedingo/types/graphql.go new file mode 100644 index 0000000..3e031af --- /dev/null +++ b/pkg/linkedingo/types/graphql.go @@ -0,0 +1,11 @@ +package types + +type GraphQLQueryIDs string + +const ( + GRAPHQL_QUERY_ID_MESSENGER_CONVERSATIONS GraphQLQueryIDs = "messengerConversations.95e0a4b80fbc6bc53550e670d34d05d9" + GRAPHQL_QUERY_ID_MESSENGER_CONVERSATIONS_WITH_CURSOR GraphQLQueryIDs = "messengerConversations.18240d6a3ac199067a703996eeb4b163" + GRAPHQL_QUERY_ID_MESSENGER_MESSAGES_BY_SYNC_TOKEN GraphQLQueryIDs = "messengerMessages.d1b494ac18c24c8be71ea07b5bd1f831" + GRAPHQL_QUERY_ID_MESSENGER_MESSAGES_BY_ANCHOR_TIMESTAMP GraphQLQueryIDs = "messengerMessages.b52340f92136e74c2aab21dac7cf7ff2" + GRAPHQL_QUERY_ID_MESSENGER_MESSAGES_BY_CONVERSATION GraphQLQueryIDs = "messengerMessages.86ca573adc64110d94d8bce89c5b2f3b" +) diff --git a/pkg/linkedingo/types/http.go b/pkg/linkedingo/types/http.go new file mode 100644 index 0000000..85ce157 --- /dev/null +++ b/pkg/linkedingo/types/http.go @@ -0,0 +1,37 @@ +package types + +type RequestMethod string + +const ( + POST RequestMethod = "POST" + GET RequestMethod = "GET" + PUT RequestMethod = "PUT" + DELETE RequestMethod = "DELETE" +) + +type ContentType string + +const ( + NONE ContentType = "" + JSON ContentType = "application/json" + JSON_PLAINTEXT_UTF8 ContentType = "application/json; charset=UTF-8" + JSON_LINKEDIN_NORMALIZED ContentType = "application/vnd.linkedin.normalized+json+2.1" + FORM ContentType = "application/x-www-form-urlencoded" + GRAPHQL ContentType = "application/graphql" + TEXT_EVENTSTREAM ContentType = 
"text/event-stream" + PLAINTEXT_UTF8 ContentType = "text/plain;charset=UTF-8" + IMAGE_JPEG ContentType = "image/jpeg" + VIDEO_MP4 ContentType = "video/mp4" +) + +type HeaderOpts struct { + WithCookies bool + WithCsrfToken bool + WithXLiTrack bool + WithXLiPageInstance bool + WithXLiProtocolVer bool + WithXLiLang bool + Referer string + Origin string + Extra map[string]string +} diff --git a/pkg/linkedingo/types/user_profile.go b/pkg/linkedingo/types/user_profile.go new file mode 100644 index 0000000..2996833 --- /dev/null +++ b/pkg/linkedingo/types/user_profile.go @@ -0,0 +1,26 @@ +package types + +type UserProfile struct { + FirstName string `json:"firstName"` + LastName string `json:"lastName"` + Occupation string `json:"occupation"` + PublicIdentifier string `json:"publicIdentifier"` + Picture string `json:"picture,omitempty"` + Memorialized bool `json:"memorialized"` + + EntityUrn string `json:"entityUrn"` + ObjectUrn string `json:"objectUrn"` + DashEntityUrn string `json:"dashEntityUrn"` + + TrackingId string `json:"trackingId"` +} + +type UserLoginProfile struct { + PlainId string `json:"plainId"` + MiniProfile UserProfile `json:"miniProfile"` +} + +type GetCommonMeResponse struct { + Data map[string]string `json:"data"` + Included []UserProfile `json:"included"` +} diff --git a/pyproject.toml b/pyproject.toml deleted file mode 100644 index f487dcf..0000000 --- a/pyproject.toml +++ /dev/null @@ -1,21 +0,0 @@ -[[tool.mypy.overrides]] -module = [ - # https://github.com/MagicStack/asyncpg/pull/577 - "asyncpg", - "bs4", - "PIL", - "ruamel", -] -ignore_missing_imports = true - -[tool.isort] -profile = "black" -force_to_top = "typing" -from_first = true -combine_as_imports = true -known_first_party = "mautrix" -line_length = 99 - -[tool.black] -line-length = 99 -target-version = ["py38"] diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 764bdcc..0000000 --- a/requirements.txt +++ /dev/null @@ -1,10 +0,0 @@ -aiohttp>=3,<4 
-asyncpg>=0.20,<0.30 -beautifulsoup4>=4,<5 -commonmark>=0.8,<0.10 -dataclasses-json>=0.6.3,<0.7 -mautrix>=0.20.5,<0.21 -pycryptodome>=3,<4 -python-magic>=0.4,<0.5 -ruamel.yaml>=0.15.94,<0.19 -yarl>=1,<2 diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index f9ddc8a..0000000 --- a/setup.cfg +++ /dev/null @@ -1,8 +0,0 @@ -[flake8] -extend-ignore = ANN101, ANN102, E203 -exclude = .git,__pycache__,.venv -max-line-length = 99 -suppress-none-returning = True -suppress-dummy-args = True -application-import-names = linkedin_matrix -import-order-style = edited diff --git a/setup.py b/setup.py deleted file mode 100644 index 6350540..0000000 --- a/setup.py +++ /dev/null @@ -1,74 +0,0 @@ -import setuptools - -from linkedin_matrix.get_version import git_tag, git_revision, version, linkified_version - -try: - long_desc = open("README.md").read() -except IOError: - long_desc = "Failed to read README.md" - -with open("requirements.txt") as reqs: - install_requires = reqs.read().splitlines() - -with open("optional-requirements.txt") as reqs: - extras_require = {} - current = [] - for line in reqs.read().splitlines(): - if line.startswith("#/"): - extras_require[line[2:]] = current = [] - elif not line or line.startswith("#"): - continue - else: - current.append(line) - -extras_require["all"] = list({dep for deps in extras_require.values() for dep in deps}) - -with open("linkedin_matrix/version.py", "w") as version_file: - version_file.write(f"""# Generated in setup.py - -git_tag = {git_tag!r} -git_revision = {git_revision!r} -version = {version!r} -linkified_version = {linkified_version!r} -""") - -setuptools.setup( - name="linkedin_matrix", - version=version, - url="https://github.com/beeper/linkedin", - project_urls={ - "Changelog": "https://github.com/beeper/linkedin/blob/master/CHANGELOG.md", - }, - - author="Sumner Evans", - author_email="sumner@beeper.com", - - description="A Matrix-LinkedIn Messages puppeting bridge.", - long_description=long_desc, - 
long_description_content_type="text/markdown", - - packages=setuptools.find_packages(), - - install_requires=install_requires, - extras_require=extras_require, - python_requires=">=3.10", - - keywords=["matrix", "LinkedIn"], - - classifiers=[ - "Development Status :: 4 - Beta", - "License :: OSI Approved :: Apache Software License", - "Topic :: Communications :: Chat", - "Framework :: AsyncIO", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - ], - package_data={ - "linkedin_matrix": ["example-config.yaml"], - }, - data_files=[ - (".", ["linkedin_matrix/example-config.yaml"]), - ], -)