From 45ea74f0123574618ea6af2e7723c2d54d146ce4 Mon Sep 17 00:00:00 2001 From: sanix-darker Date: Sun, 14 May 2023 02:45:24 +0200 Subject: [PATCH 01/15] feat: cleaning docker-compose feat: migrating from requirements.txt to poetry.lock feat: update on the dockerfile fix: update from the new linter feat: cleaning flake8 + adding more lib to calm down mypy feat: update on deps + some cleaning fix: twitter controller fix: ordering with isort + some updates on controller feat:updates again db: dump (import/export) users and projects working feat: updated the .gitignore --- .github/workflows/api-unit-test.yaml | 13 +- .gitignore | 5 + api/.env.dist | 28 + api/.flake8 | 5 - api/Dockerfile | 19 +- api/Makefile | 52 +- api/README.md | 7 +- api/api.ini | 11 - api/app/__init__.py | 22 - api/app/main/__init__.py | 13 +- api/app/main/config.py | 9 +- api/app/main/controller/github_controller.py | 337 +---- api/app/main/controller/twitter_controller.py | 30 +- api/app/main/utils/converters.py | 1 + api/app/main/utils/database/projects.py | 9 +- .../main/utils/database/search_projects.py | 163 +-- api/app/main/utils/database/search_users.py | 128 +- api/app/main/utils/database/storage.py | 3 - .../main/utils/database/twitter/top_tweets.py | 49 +- api/app/main/utils/database/users.py | 5 +- api/app/main/utils/helpers/cache.py | 26 +- api/app/settings.py | 37 +- api/example.config.txt | 10 +- api/manage.py | 26 +- api/ossdb/projects/export.sh | 27 + api/ossdb/projects/import.sh | 159 +++ api/ossdb/users/export.sh | 27 + api/ossdb/users/import.sh | 67 + api/poetry.lock | 1251 +++++++++++++++++ api/pyproject.toml | 34 + api/requirements.txt | 22 - api/tests/app/.cmd_history | 1 + api/tests/app/test_settings.py | 23 + api/tests/main/test_github_controller.py | 117 ++ api/tests/main/test_twitter_controller.py | 73 + api/tests/test_assert.py | 2 +- api/wsgi.py | 4 - docker-compose.yml | 93 +- gcloud-emulator/Dockerfile-datastore | 26 - 39 files changed, 2170 insertions(+), 764 deletions(-) create mode 100644 api/.env.dist delete mode 100644 api/.flake8 delete mode 100644 api/api.ini create mode 100644 api/ossdb/projects/export.sh create mode 100644 api/ossdb/projects/import.sh create mode 100644 api/ossdb/users/export.sh create mode 100644 api/ossdb/users/import.sh create mode 100644 api/poetry.lock create mode 100644 api/pyproject.toml delete mode 100644 api/requirements.txt create mode 100644 api/tests/app/.cmd_history create mode 100644 api/tests/app/test_settings.py create mode 100644 api/tests/main/test_github_controller.py create mode 100644 api/tests/main/test_twitter_controller.py delete mode 100644 api/wsgi.py delete mode 100644 gcloud-emulator/Dockerfile-datastore diff --git a/.github/workflows/api-unit-test.yaml b/.github/workflows/api-unit-test.yaml index 37e339fc..cfaa25c5 100644 --- a/.github/workflows/api-unit-test.yaml +++ b/.github/workflows/api-unit-test.yaml @@ -1,6 +1,3 @@ -# This workflow will install Python dependencies, run tests and lint with a single version of Python -# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions - name: api check + unit test on: @@ -33,14 +30,12 @@ jobs: - name: Install dependencies run: | - python -m pip install --upgrade pip + python -m pip install -U pip poetry pip install virtualenv make install-deps - - name: Lint with flake8 - run: | - make lint + - name: Lint with ruff + run: make lint - name: Unit test - run: | - make test + run: make test diff --git a/.gitignore b/.gitignore index 50e0d41d..ed327c64 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,7 @@ +# for database as json files +api/ossdb/users/data/ +api/ossdb/projects/data/ + .history .idea .vscode @@ -6,6 +10,7 @@ *venv .secrets .secrets-env +.env #vim .*sw* diff --git a/api/.env.dist b/api/.env.dist new file mode 100644 index 00000000..34a7f946 --- /dev/null +++ b/api/.env.dist @@ -0,0 +1,28 @@ +DEBUG = true + +# ~~~~~~ database stuff below ~~~~~~ + +DB_HOST = localhost +DB_PORT = 5432 + +# those vars are used by the init script of postgresql service +# ONLY FOR DEV +OSS_WEBSITE_APP_USER = "oss_website" +OSS_WEBSITE_APP_PASSWORD = "password" +OSS_WEBSITE_APP_DATABASE = "oss_website" + +OSS_WEBSITE_SCHEMA = "oss_website" + +OSS_WEBSITE_ADMIN_USER = "oss_website_admin" +OSS_WEBSITE_ADMIN_PASSWORD = "password" + +# Those variables will be used in production environment +DATABASE_URL = "${OSS_WEBSITE_APP_USER}:${OSS_WEBSITE_APP_PASSWORD}@${DB_HOST}:${DB_PORT}/${OSS_WEBSITE_APP_DATABASE}" +DATABASE_URL_ADMIN = "${OSS_WEBSITE_ADMIN_USER}:${OSS_WEBSITE_ADMIN_PASSWORD}@${DB_HOST}:${DB_PORT}/${OSS_WEBSITE_APP_DATABASE}" + +# Auth options +TESTING = true + +# Twitter stuffs +TWITTER_API_KEY = "---------------------------" +TWITTER_API_SECRET_KEY = "---------------------------" diff --git a/api/.flake8 b/api/.flake8 deleted file mode 100644 index 9c9f8f13..00000000 --- a/api/.flake8 +++ /dev/null @@ -1,5 +0,0 @@ -[flake8] -ignore = E226,E302,E41 -max-line-length = 160 -exclude = tests/*,./venv/* -max-complexity = 10 diff --git a/api/Dockerfile b/api/Dockerfile index ef5e5ef3..3c8aa91f 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,12 +1,13 @@ -FROM python:3.8-slim-buster +FROM python:3.11-slim-buster -RUN apt update && apt install git gcc -y -ENV PYHTONUNBUFFERED 1 -RUN mkdir /code -WORKDIR /code -ADD ./requirements.txt ./ -RUN pip install -r requirements.txt +WORKDIR /app -COPY . /code +COPY pyproject.toml poetry.lock ./ +RUN pip install -U pip poetry && poetry install -CMD [ "python", "manage.py", "run" ] +COPY . /app + +ENTRYPOINT ["/bin/sh", "-c"] + +# TODO: Use gunicorn for prod and add a gunicorn config file that can be overriden by a ConfigMap +CMD ["uvicorn oss_website.main:app --host 0.0.0.0 --port 80"] diff --git a/api/Makefile b/api/Makefile index 5e70e323..d2368675 100644 --- a/api/Makefile +++ b/api/Makefile @@ -1,39 +1,63 @@ .DEFAULT_GOAL=help - +SHELL := /bin/bash CONFIG_FILE=./config.txt VENVPATH=venv -PYTHON=$(VENVPATH)/bin/python3 +PY3=python3.11 +PYTHON=$(VENVPATH)/bin/$(PY3) + +# some dev targets for the code quality +RUFF=$(PYTHON) -m ruff app tests *.py +ISORT=$(PYTHON) -m isort app tests *.py +MYPY=$(PYTHON) -m mypy app tests *.py venv: $(VENVPATH)/bin/activate -$(VENVPATH)/bin/activate: requirements.txt - test -d $(VENVPATH) || virtualenv -p python3 $(VENVPATH); \ +$(VENVPATH)/bin/activate: poetry.lock + test -d $(VENVPATH) || virtualenv -p $(PY3) $(VENVPATH); \ . $(VENVPATH)/bin/activate; \ - pip install -r requirements.txt; \ + poetry install; \ touch $(VENVPATH)/bin/activate; $(CONFIG_FILE): - echo "adding config file..." + echo "Adding config file..." cp example.config.txt $(CONFIG_FILE) -##install-deps: setup your dev environment -install-deps: venv $(CONFIG_FILE) +##install: setup your dev environment +install: venv $(CONFIG_FILE) ##run: run the api locally -run: install-deps - GOOGLE_APPLICATION_CREDENTIALS=.secrets/service-account.json $(PYTHON) manage.py run +run: install + $(PYTHON) manage.py run + +##format: Reformat project code. +format: venv + ${RUFF} --fix + ${ISORT} +##lint: Lint project code and check it with mypy lint: venv - $(PYTHON) -m flake8 . --show-source --statistics + ${RUFF} + ${ISORT} --check-only --df + ${MYPY} ##test: test your code -test: install-deps lint +test: install lint $(PYTHON) -m pytest +##clean: remove the venv and residuals clean: - rm -rf $(VENVPATH) + rm -rf *.pyc + rm -rf {*/__pycache__,*/*__pycache__,*/*/*__pycache__} ##help: show help help : Makefile @sed -n 's/^##//p' $< -.PHONY : help venv install-deps test lint +##lock: sync dependency tree between pyproject.toml and poetry.lock +lock: poetry.lock + poetry lock --no-update + +##docker-build: build the api docker image +docker-build: + docker build -t osswebsite:latest -f ./Dockerfile . + +.PHONY : help venv run install-deps test lint docker-build diff --git a/api/README.md b/api/README.md index 1a800133..e0facbca 100644 --- a/api/README.md +++ b/api/README.md @@ -1,17 +1,20 @@ # Oss Cameroon WebSite Backend - ## Requirements - python (>2.7,<=3.8) +- poetry (>1.4 recommended) - make ## How to install/launch ```bash +# starts postgres server +docker-compose up -d pgadmin + make run # this will set your virtual env `make venv` -# this will install dependencies `make install-deps` +# this will install dependencies `make install` # and then run the backend... ``` diff --git a/api/api.ini b/api/api.ini deleted file mode 100644 index 5049f844..00000000 --- a/api/api.ini +++ /dev/null @@ -1,11 +0,0 @@ -[uwsgi] -module = wsgi:app - -master = true -processes = 5 - -socket = caparledev-api.sock -chmod-socket = 666 -vacuum = true - -die-on-term = true diff --git a/api/app/__init__.py b/api/app/__init__.py index 99eb85b8..e69de29b 100644 --- a/api/app/__init__.py +++ b/api/app/__init__.py @@ -1,22 +0,0 @@ -from flask_restplus import Api as TheAPI -from flask import Blueprint, url_for - -from app.main.controller.github_controller import api as github -from app.main.controller.twitter_controller import api as twitter - -blueprint = Blueprint('api', __name__) - - -class Api(TheAPI): - @property - def specs_url(self): - return url_for(self.endpoint('specs'), _external=True) - - -api = Api(blueprint, - title='CAPARLEDEV RESTPLUS API ', - version='1.0', - description='The Backend of the platform CaParleDev.') - -api.add_namespace(github) -api.add_namespace(twitter) diff --git a/api/app/main/__init__.py b/api/app/main/__init__.py index 85c17550..130d41f1 100755 --- a/api/app/main/__init__.py +++ b/api/app/main/__init__.py @@ -1,12 +1,5 @@ -from flask import Flask -from flask_cors import CORS +from fastapi import FastAPI -from app.main.config import config_by_name - -def create_app(config_name): - app = Flask(__name__) - CORS(app) - app.config.from_object(config_by_name[config_name]) - - return app +def create_app(): + return FastAPI() diff --git a/api/app/main/config.py b/api/app/main/config.py index 24b5b759..7e2b27f1 100755 --- a/api/app/main/config.py +++ b/api/app/main/config.py @@ -1,17 +1,16 @@ import os - basedir = os.path.abspath(os.path.dirname(__file__)) class Config: SECRET_KEY = os.getenv('SECRET_KEY', 'my_precious_secret_key') DEBUG = False - APP_HOST = os.getenv('APP_HOST', '0.0.0.0') - APP_PORT = os.getenv('APP_PORT', '8811') + # app envs + APP_HOST = os.getenv('APP_HOST', '0.0.0.0') + APP_PORT = os.getenv('APP_PORT', '8811') APP_VERSION = os.getenv('APP_VERSION', '0.0.1') - APP_NAME = os.getenv('APP_NAME', 'CaParleDev-WebSite') - + APP_NAME = os.getenv('APP_NAME', 'CaParleDev-WebSite') class DevelopmentConfig(Config): DEBUG = True diff --git a/api/app/main/controller/github_controller.py b/api/app/main/controller/github_controller.py index 1bf6bf43..4dea5afc 100644 --- a/api/app/main/controller/github_controller.py +++ b/api/app/main/controller/github_controller.py @@ -1,297 +1,104 @@ -from flask_restplus import Resource, fields -from flask import request +from typing import Any -# from app.main.utils.decorator import * -from app.main.utils.dto import ApiDto -from app.main.utils.database.users import get_users, get_user -from app.main.utils.database.projects import get_projects, get_project -from app.main.utils.database.search_projects import ( - get_search_projects, - post_search_projects, -) - -from app.main.utils.database.search_users import ( - post_search_users, - get_search_users, -) +from fastapi import Request from app.main.utils.database.languages import get_languages - - -api = ApiDto.github_api +from app.main.utils.database.projects import get_project, get_projects +from app.main.utils.database.search_projects import (get_search_projects, + post_search_projects) +from app.main.utils.database.search_users import (get_search_users, + post_search_users) +from app.main.utils.database.users import get_user, get_users +from manage import app # Ex : /users?count= -@api.route("/users", methods=["GET"]) -class ApidtoUsers(Resource): - @api.doc( - "Get_all_users", - params={ - "count": "item count", - }, - ) - def get(self): - """This method will return all github users with filter""" - - count = request.args.get("count") - if count is not None: - count = int(count) - else: - count = 20 - - result = get_users(count=count) - - return result, result["code"] +@app.get("/users") +async def all_users(count: int=20) -> dict : + """This method will return all github users with filter""" + return get_users(count) # Ex : /users/elhmne -@api.route("/users/", methods=["GET"]) -class ApidtoUser(Resource): - @api.doc("Get_user_infos") - def get(self, user_name): - """This method will return a github user with more informations""" - - result = get_user(user_name) - return result, result["code"] +@app.get("/users/") +async def user_infos_username(user_name: str) -> dict : + """This method will return a github user with more informations""" + return get_user(user_name) # Ex : /users/search?query=&count=&page= -@api.route("/users/search", methods=["GET", "POST"]) -class ApidtoSearch(Resource): - @api.doc( - "Get_search_infos", - params={ - "query": "query string can be a user name", - "page": "page number", - "count": "item count", - }, +@app.get("/users/search") +async def search_users(query: str, count: int=20, page: int=1) -> dict : + """ + This request will return the list of users that + match the query string + """ + return get_search_users( + query=query, + count=count, + page=page ) - def get(self): - """This request will return the list of users that match the query string""" - query = request.args.get("query") - - count = request.args.get("count") - if count is not None: - count = int(count) - else: - count = 20 - page = request.args.get("page") - if page is not None: - page = int(page) - else: - page = 1 - result = get_search_users(query, count, page) - return result, result["code"] +@app.post("/users/search") +async def user_search_infos(request: Request) -> dict : + """This request will return all github users that matches search query field""" + request_json: dict[str, Any] = await request.json() or {} - user_model = api.model( - "User Model", - { - "query": fields.String( - description="search string", - help="Enter a search query string", - default="", - ), - "page": fields.Integer( - description="Page number", - default=1 - ), - "count": fields.Integer( - description="count of elements per page", - default=20 - ), - "sort_type": fields.String( - description="Sorting type [alphabetic, most_recent]", - help="Specify sorting type", - default="most_recent" - ), - }, + return post_search_users( + query=request_json.get("query", ""), + sort_type=request_json.get("sort_type", ""), + page=request_json.get("page", 1), + count=request_json.get("count", 20) ) - @api.expect(user_model) - @api.doc( - "Post_users_search_infos", - ) - def post(self): - """This request will return all github users that matches search query field""" - data = request.json - - # get query - query = data.get("query") - - # get count - count = data.get("count") - if count is not None: - count = int(count) - else: - count = 20 - - # get page - page = data.get("page") - if page is not None: - page = int(page) - else: - page = 1 - - # get sort_type - sort_type = data.get("sort_type") - if sort_type is None: - sort_type = "" - - result = post_search_users( - query, sort_type=sort_type, page=page, count=count - ) - return result, result["code"] - - -# Ex : /projects/node-openerp -@api.route("/projects/", methods=["GET"]) -class ApidtoProject(Resource): - @api.doc("Get_user_infos") - def get(self, project_name): - """This request will return a github project by name""" - - result = get_project(project_name) - return result, result["code"] - # Ex : /projects?count= -@api.route("/projects", methods=["GET"]) -class ApidtoProjects(Resource): - @api.doc( - "Get_all_projects", - params={ - "count": "item count", - }, - ) - def get(self): - """This request will return all github projects""" - - count = request.args.get("count") - if count is not None: - count = int(count) - else: - count = 20 +@app.get("/projects") +async def all_projects(count: int=20) -> dict : + """This request will return all github projects""" + return get_projects(count) - page = request.args.get("page") - if page is not None: - page = int(page) - else: - page = 1 - result = get_projects(count=count) - - return result, result["code"] +@app.get("/projects") +async def user_infos_project(project_name: str) -> dict : + """This request will return a github project by name""" + return get_project(project_name) # Ex : /projects/search?query=&count=&page= -@api.route("/projects/search", methods=["GET", "POST"]) -class ApidtoProjectsSearch(Resource): - @api.doc( - "Get_search_infos", - params={ - "query": "query string", - "count": "item count", - "page": "page number", - }, +@app.get("/projects/search") +async def project_search(query: str, count: int=20, page: int=1) -> dict : + """ + This request will return all github projects + that matches search query field + """ + return get_search_projects( + query=query, + count=count, + page=page ) - def get(self): - """This request will return all github projects that matches search query field""" - query = request.args.get("query") - - count = request.args.get("count") - if count is not None: - count = int(count) - else: - count = 20 - - page = request.args.get("page") - if page is not None: - page = int(page) - else: - page = 1 - - result = get_search_projects(query, count, page) - return result, result["code"] - project_model = api.model( - "Project Model", - { - "query": fields.String( - description="search string", - help="Enter a search query string", - default="", - ), - "page": fields.Integer( - description="Page number", - default=1 - ), - "count": fields.Integer( - description="count of elements per page", - default=20 - ), - "languages": fields.List( - cls_or_instance=fields.String, - description="list of languages", - help="Specify a list of languages", - default=["javascript", "java", "c", "c++"] - ), - "sort_type": fields.String( - description="Sorting type [alphabetic, popularity, most_recent]", - help="Specify sorting type", - default="most_recent" - ), - }, - ) - @api.expect(project_model) - @api.doc( - "Post_project_search_infos", +@app.post("/projects/search") +async def project_search_infos(request: Request) -> dict : + """ + This request will return all github projects + that matches search query field + """ + request_json = await request.json() + + return post_search_projects( + query=request_json.get("query", ""), + sort_type=request_json.get("sort_type", ""), + languages=request_json.get("languages", []), + page=request_json.get("page", 1), + count=request_json.get("count", 20) ) - def post(self): - """This request will return all github projects that matches search query field""" - data = request.json - - # get query - query = data.get("query") - - # get count - count = data.get("count") - if count is not None: - count = int(count) - else: - count = 20 - - # get page - page = data.get("page") - if page is not None: - page = int(page) - else: - page = 1 - - # get sort_type - sort_type = data.get("sort_type") - if sort_type is None: - sort_type = "" - - # get languages - languages = data.get("languages") - if languages is None: - languages = [] - - result = post_search_projects( - query, sort_type=sort_type, languages=languages, page=page, count=count - ) - return result, result["code"] # Ex : /languages -@api.route("/languages", methods=["GET"]) -class ApidtoLanguages(Resource): - @api.doc("Get_github_languages") - def get(self): - """This request will return a list of github languages""" - - result = get_languages() - return result, result["code"] +@app.get("/languages") +async def github_languages() -> dict : + """This request will return a list of github languages""" + return get_languages() diff --git a/api/app/main/controller/twitter_controller.py b/api/app/main/controller/twitter_controller.py index a1e30b48..9168bc46 100644 --- a/api/app/main/controller/twitter_controller.py +++ b/api/app/main/controller/twitter_controller.py @@ -1,32 +1,12 @@ -from flask import request -from flask_restplus import Resource -from app.main.utils.dto import ApiDto from app.main.utils.database.twitter.top_tweets import get_top_tweets from app.main.utils.helpers.cache import Cache +from manage import app - -api = ApiDto.twitter_api cache = Cache() # Ex : /top-tweets?count= # Default count is 6 -@api.route("/top-tweets", methods=["GET"]) -class ApidtoTopTweets(Resource): - @api.doc( - "Get_top_tweets", - params={ - "count": "item count" - } - ) - def get(self): - """This method will return all top tweets""" - - count = request.args.get("count") - if count is not None: - count = int(count) - else: - count = 6 - - result = get_top_tweets(cache, count) - - return result, result["code"] +@app.get("/top-tweets") +def top_tweets(count: int=6) -> dict: + """This method will return all top tweets""" + return get_top_tweets(cache, count) diff --git a/api/app/main/utils/converters.py b/api/app/main/utils/converters.py index fbf25b8f..3ad9024d 100644 --- a/api/app/main/utils/converters.py +++ b/api/app/main/utils/converters.py @@ -1,5 +1,6 @@ import datetime + def convert_datetime_fields_to_string(data: dict): """ this function converts top level field of type datetime diff --git a/api/app/main/utils/database/projects.py b/api/app/main/utils/database/projects.py index 3a9d8b52..11d5fcb4 100644 --- a/api/app/main/utils/database/projects.py +++ b/api/app/main/utils/database/projects.py @@ -1,7 +1,7 @@ # database utils functions -from app.main.utils.database import storage from app.main.utils import converters +from app.main.utils.database import storage def sanitize_project_data(data): @@ -39,8 +39,11 @@ def get_one_page_of_projects(cursor=None, limit: int = 20): def get_projects(count: int = 20): """ - get_users [this function fetch open source projects from the database] - the count of items returned by this function can be limited to the size of data the datastore is able to return + get_users [this function fetch open + source projects from the database] + the count of items returned by this function + can be limited to the size of data + the datastore is able to return @params : count @returns : - code : the status code of the request diff --git a/api/app/main/utils/database/search_projects.py b/api/app/main/utils/database/search_projects.py index 63570fa9..01046299 100644 --- a/api/app/main/utils/database/search_projects.py +++ b/api/app/main/utils/database/search_projects.py @@ -1,31 +1,32 @@ # database utils functions -from app.main.utils.database import storage -from app.settings import MEILISEARCH_MASTER_KEY, MEILISEARCH_HOST -import meilisearch -import datetime -import time +import asyncpg -SORT_TYPE_POPULARITY = "popularity" -SORT_TYPE_MOST_RECENT = "most_recent" -SORT_TYPE_ALPHABETIC = "alphabetic" +from app.settings import (OSS_WEBSITE_APP_DATABASE, OSS_WEBSITE_APP_HOST, + OSS_WEBSITE_APP_PASSWORD, OSS_WEBSITE_APP_PORT, + OSS_WEBSITE_APP_USER) -def get_search_projects(query: str, count: int = 20, page: int = 1): - """ - get_search_users [this method search for users in our datastore - @params : query, count, page - @returns : - code : the status code of the request - - status the status string of the request - - result the result of that request - """ +async def create_pool(): + return await asyncpg.create_pool( + user=OSS_WEBSITE_APP_USER, + password=OSS_WEBSITE_APP_PASSWORD, + database=OSS_WEBSITE_APP_DATABASE, + host=OSS_WEBSITE_APP_HOST, + port=OSS_WEBSITE_APP_PORT + ) +async def get_search_projects(pool, query: str, count: int = 20, page: int = 1): offset = (page - 1) * count + conn = await pool.acquire() + + try: + ret = await conn.fetch( + 'SELECT * FROM projects WHERE name LIKE $1 LIMIT $2 OFFSET $3', + f"%{query}%", count, offset + ) + finally: + await pool.release(conn) - client = meilisearch.Client(MEILISEARCH_HOST, MEILISEARCH_MASTER_KEY) - index = client.get_index(storage.KIND_PROJECTS) - ret = index.search( - storage.KIND_PROJECTS, {"q": query, "limit": count, "offset": offset} - ) if not ret or len(ret) < 1: return {"code": 400, "reason": "nothing found"} @@ -37,97 +38,47 @@ def get_search_projects(query: str, count: int = 20, page: int = 1): return response - -def build_project_filters(languages): - if len(languages) <= 0: - return "" - - langs = "" - len_lang = len(languages) - for i in range(len_lang): - langs += f'language = "{languages[i]}"' - if i != len_lang - 1: - langs += " OR " - filters = langs - return filters - - -def alphabetic_sort(item): - return item.get("name").lower() - - -def popularity_sort(item): - return item.get("stargazers_count") - - -def most_recent_sort(item): - # convert created_at to timestamp in second - date_str = item.get("created_at") - date = datetime.datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%SZ") - time_tuple = date.timetuple() - return time.mktime(time_tuple) - - -def sort_result_by(sort_type: str, items: list = []): - if sort_type == SORT_TYPE_ALPHABETIC: - items.sort(key=alphabetic_sort) - elif sort_type == SORT_TYPE_MOST_RECENT: - items.sort(key=most_recent_sort, reverse=True) - elif sort_type == SORT_TYPE_POPULARITY: - items.sort(key=popularity_sort, reverse=True) - - return items - - -def post_search_projects( +async def post_search_projects( + pool, query: str, - languages: list = [], + languages: list[str] = [], sort_type: str = "", count: int = 20, - page: int = 1, + page: int = 1 ): - """ - post_search_users [this method search for users in our datastore - - @params : query, count, page - @returns : - code : the status code of the request - - status the status string of the request - - result the result of that request - """ - offset = (page - 1) * count - filters = build_project_filters(languages) - client = meilisearch.Client(MEILISEARCH_HOST, MEILISEARCH_MASTER_KEY) - index = client.get_index(storage.KIND_PROJECTS) + conn = await pool.acquire() + + try: + if sort_type == 'alphabetic': + ret = await conn.fetch( + 'SELECT * FROM projects WHERE name LIKE $1 AND ' + 'language = ANY($2) ORDER BY name LIMIT $3 OFFSET $4', + f"%{query}%", languages, count, offset + ) + elif sort_type == 'most_recent': + ret = await conn.fetch( + 'SELECT * FROM projects WHERE name LIKE $1 AND ' + 'language = ANY($2) ORDER BY created_at DESC LIMIT $3 OFFSET $4', + f"%{query}%", languages, count, offset + ) + elif sort_type == 'popularity': + ret = await conn.fetch( + 'SELECT * FROM projects WHERE name LIKE $1 AND ' + 'language = ANY($2) ORDER BY stargazers_count DESC LIMIT $3 OFFSET $4', + f"%{query}%", languages, count, offset + ) + else: + ret = await conn.fetch( + 'SELECT * FROM projects WHERE name LIKE $1 AND ' + 'language = ANY($2) LIMIT $3 OFFSET $4', + f"%{query}%", languages, count, offset + ) + finally: + await pool.release(conn) - # if sort_type is not specified or not supported - if sort_type not in [ - SORT_TYPE_ALPHABETIC, - SORT_TYPE_MOST_RECENT, - SORT_TYPE_POPULARITY, - ]: - query_object = {"q": query, "limit": count, "offset": offset} - if filters != "": - query_object["filters"] = filters - ret = index.search( - storage.KIND_PROJECTS, - query_object, - ) - if not ret or len(ret) < 1: - return {"code": 400, "reason": "nothing found"} - ret["hits"] = sort_result_by(sort_type, ret["hits"]) - # if sort_type is specified we fetch every single elements and sort them handle the pagination on the application level - else: - query_object = {"q": query, "limit": 1000} - if filters != "": - query_object["filters"] = filters - ret = index.search(storage.KIND_PROJECTS, query_object) - if not ret or len(ret) < 1: - return {"code": 400, "reason": "nothing found"} - ret["hits"] = sort_result_by(sort_type, ret["hits"]) - ret["hits"] = ret["hits"][offset:offset + count] - ret["offset"] = offset - ret["limit"] = count + if not ret or len(ret) < 1: + return {"code": 400, "reason": "nothing found"} response = { "code": 200, diff --git a/api/app/main/utils/database/search_users.py b/api/app/main/utils/database/search_users.py index b4524162..101e7e8a 100644 --- a/api/app/main/utils/database/search_users.py +++ b/api/app/main/utils/database/search_users.py @@ -1,31 +1,32 @@ # database utils functions -from app.main.utils.database import storage -from app.settings import MEILISEARCH_MASTER_KEY, MEILISEARCH_HOST -import meilisearch -import datetime -import time +import asyncpg +from app.settings import (OSS_WEBSITE_APP_DATABASE, OSS_WEBSITE_APP_HOST, + OSS_WEBSITE_APP_PASSWORD, OSS_WEBSITE_APP_PORT, + OSS_WEBSITE_APP_USER) -SORT_TYPE_MOST_RECENT = "most_recent" -SORT_TYPE_ALPHABETIC = "alphabetic" -def get_search_users(query: str, count: int = 20, page: int = 1): - """ - get_search_users [this method search for users in our datastore - - @params : query, count, page - @returns : - code : the status code of the request - - status the status string of the request - - result the result of that request - """ +async def create_pool(): + return await asyncpg.create_pool( + user=OSS_WEBSITE_APP_USER, + password=OSS_WEBSITE_APP_PASSWORD, + database=OSS_WEBSITE_APP_DATABASE, + host=OSS_WEBSITE_APP_HOST, + port=OSS_WEBSITE_APP_PORT + ) +async def get_search_users(pool, query: str, count: int = 20, page: int = 1): offset = (page - 1) * count + conn = await pool.acquire() + + try: + ret = await conn.fetch( + 'SELECT * FROM users WHERE name LIKE $1 LIMIT $2 OFFSET $3', + f"%{query}%", count, offset + ) + finally: + await pool.release(conn) - client = meilisearch.Client(MEILISEARCH_HOST, MEILISEARCH_MASTER_KEY) - index = client.get_index(storage.KIND_USERS) - ret = index.search( - storage.KIND_USERS, {"q": query, "limit": count, "offset": offset} - ) if not ret or len(ret) < 1: return {"code": 400, "reason": "nothing found"} @@ -37,69 +38,40 @@ def get_search_users(query: str, count: int = 20, page: int = 1): return response -def alphabetic_sort(item): - return item.get("login").lower() - - -def most_recent_sort(item): - # convert created_at to timestamp in second - date_str = item.get("created_at") - date = datetime.datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%SZ") - time_tuple = date.timetuple() - return time.mktime(time_tuple) - - -def sort_result_by(sort_type: str, items: list = []): - if sort_type == SORT_TYPE_ALPHABETIC: - items.sort(key=alphabetic_sort) - elif sort_type == SORT_TYPE_MOST_RECENT: - items.sort(key=most_recent_sort, reverse=True) - - return items - - -def post_search_users( +async def post_search_users( + pool, query: str, sort_type: str = "", count: int = 20, - page: int = 1, + page: int = 1 ): - """ - post_search_users [this method search for users in our datastore - - @params : query, count, page - @returns : - code : the status code of the request - - status the status string of the request - - result the result of that request - """ - offset = (page - 1) * count - client = meilisearch.Client(MEILISEARCH_HOST, MEILISEARCH_MASTER_KEY) - index = client.get_index(storage.KIND_USERS) + conn = await pool.acquire() + + try: + if sort_type == 'alphabetic': + ret = await conn.fetch( + 'SELECT * FROM users WHERE name LIKE $1 ' + 'ORDER BY name LIMIT $2 OFFSET $3', + f"%{query}%", count, offset + ) + elif sort_type == 'most_recent': + ret = await conn.fetch( + 'SELECT * FROM users WHERE name LIKE $1 ' + 'ORDER BY created_at DESC LIMIT $2 OFFSET $3', + f"%{query}%", count, offset + ) + else: + ret = await conn.fetch( + 'SELECT * FROM users WHERE name LIKE $1 ' + 'LIMIT $2 OFFSET $3', + f"%{query}%", count, offset + ) + finally: + await pool.release(conn) - # if sort_type is not specified or not supported - if sort_type not in [ - SORT_TYPE_ALPHABETIC, - SORT_TYPE_MOST_RECENT, - ]: - query_object = {"q": query, "limit": count, "offset": offset} - ret = index.search( - storage.KIND_USERS, - query_object, - ) - if not ret or len(ret) < 1: - return {"code": 400, "reason": "nothing found"} - ret["hits"] = sort_result_by(sort_type, ret["hits"]) - # if sort_type is specified we fetch every single elements and sort them handle the pagination on the application level - else: - query_object = {"q": query, "limit": 1500} - ret = index.search(storage.KIND_USERS, query_object) - if not ret or len(ret) < 1: - return {"code": 400, "reason": "nothing found"} - ret["hits"] = sort_result_by(sort_type, ret["hits"]) - ret["hits"] = ret["hits"][offset:offset + count] - ret["offset"] = offset - ret["limit"] = count + if not ret or len(ret) < 1: + return {"code": 400, "reason": "nothing found"} response = { "code": 200, diff --git a/api/app/main/utils/database/storage.py b/api/app/main/utils/database/storage.py index 9459c768..924370e7 100644 --- a/api/app/main/utils/database/storage.py +++ b/api/app/main/utils/database/storage.py @@ -1,8 +1,5 @@ from google.cloud import datastore -# For the gcloud auth to work properly this env variable should be set -# GOOGLE_APPLICATION_CREDENTIALS=.secrets/service-account.json - KIND_USERS = "github_users" KIND_PROJECTS = "github_projects" __CLIENT = None diff --git a/api/app/main/utils/database/twitter/top_tweets.py b/api/app/main/utils/database/twitter/top_tweets.py index 4eec7fa7..8bac5c24 100644 --- a/api/app/main/utils/database/twitter/top_tweets.py +++ b/api/app/main/utils/database/twitter/top_tweets.py @@ -1,11 +1,14 @@ +from typing import Any + import requests from requests_oauthlib import OAuth1 -from app.settings import API_KEY, API_SECRET_KEY + +from app.main.utils.helpers.cache import Cache from app.main.utils.helpers.commons import get_trace -import json +from app.settings import API_KEY, API_SECRET_KEY -def top_tweets(cache: object, count: int): +def top_tweets(cache: Cache, count: int) -> dict[str, Any] | None: """ This method will return top-tweets comming from the request or just the cache @@ -14,7 +17,8 @@ def top_tweets(cache: object, count: int): return boolean telling if everything went well [top-tweets] as string """ - if cache.get("top-tweets") is None: + # nothing inside top_tweets key + if not cache.get("top-tweets"): try: # we make another request # to the twitter api @@ -22,7 +26,10 @@ def top_tweets(cache: object, count: int): search_twitter_host = "https://api.twitter.com/1.1/search/tweets.json" tweets = requests.get( - "{}?q=%23caparledev%20-filter%3Aretweets&count={}".format(search_twitter_host, str(count)), + "{}?q=%23caparledev%20-filter%3Aretweets&count={}".format( + search_twitter_host, + str(count) + ), auth=OAuth1(API_KEY, API_SECRET_KEY) ).content.decode() @@ -31,14 +38,11 @@ def top_tweets(cache: object, count: int): except Exception: # We just print the trace-back here get_trace() - return (False, cache.get("top-tweets")) - else: - print("<< Getting from cache...") - return (True, cache.get("top-tweets")) + return cache.get("top-tweets") -def get_top_tweets(cache: object, count: int): +def get_top_tweets(cache: Cache, count: int) -> dict[str, Any]: """ This method will check the return of top-tweet and send the appropriate status code for the request @@ -46,23 +50,10 @@ def get_top_tweets(cache: object, count: int): """ results = top_tweets(cache, count) + error = True if results is None or "errors" in results else False - if results[0]: - payload = json.loads(results[1]) - if "errors" in payload: - return { - "code": 500, - "status": "error", - "result": payload - } - else: - return { - "code": 200, - "status": "success", - "result": payload - } - else: - return { - "code": 500, - "status": "error" - } + return { + "code": 500 if error else 200, + "status": "error" if error else "success", + "result": results if not error else {} + } diff --git a/api/app/main/utils/database/users.py b/api/app/main/utils/database/users.py index ba0576f5..384c85ca 100644 --- a/api/app/main/utils/database/users.py +++ b/api/app/main/utils/database/users.py @@ -1,7 +1,7 @@ # database utils functions -from app.main.utils.database import storage from app.main.utils import converters +from app.main.utils.database import storage def sanitize_user_data(data): @@ -27,7 +27,8 @@ def sanitize_array_of_user_data(data_arr: list): def get_users(count: int = 20): """ get_users [this function fetch dev users from the database] - the count of items returned by this function can be limited to the size of data the datastore is able to return + the count of items returned by this function can be limited + to the size of data the datastore is able to return @params : count @returns : - code : the status code of the request diff --git a/api/app/main/utils/helpers/cache.py b/api/app/main/utils/helpers/cache.py index 62a64fe3..0a235625 100644 --- a/api/app/main/utils/helpers/cache.py +++ b/api/app/main/utils/helpers/cache.py @@ -2,15 +2,16 @@ # with expiration life import time +from typing import Any -class Cache(object): +class Cache(object): _cache_ = {} VALUE = 0 EXPIRES = 1 @classmethod - def get(cls, key): + def get(cls, key: str) -> Any: """Get the value from the cache stored with 'key' if it exists""" try: if cls._cache_[key][cls.EXPIRES] > time.time(): @@ -22,8 +23,11 @@ def get(cls, key): return None @classmethod - def set(cls, key, value, duration=3600): - """Store/overwite a value in the cache with 'key' and an optional duration (seconds)""" + def set(cls, key, value, duration=3600) -> None: + """ + Store/overwite a value in the cache with 'key' and an optional + duration (seconds) + """ try: expires = time.time() + duration except TypeError: @@ -33,22 +37,12 @@ def set(cls, key, value, duration=3600): return cls.get(key) @classmethod - def clean(cls): + def clean(cls) -> None: """Remove all expired items from the cache""" for key in cls._cache_.keys(): cls.get(key) # Attempting to fetch an expired item deletes it @classmethod - def purge(cls): + def purge(cls) -> None: """Remove all items from the cache""" cls._cache_ = {} - - -if __name__ == "__main__": - c = Cache() - - c.set("key1", "value1", 30) - print(c.get("key1")) - - c.clean() - c.purge() diff --git a/api/app/settings.py b/api/app/settings.py index 3d7282e8..17c5b82a 100644 --- a/api/app/settings.py +++ b/api/app/settings.py @@ -1,36 +1,25 @@ # settings.py # All settings/parameter for the application -import configparser as cf import os +from typing import Any -# we read configuration from the config.txt file -conf = cf.RawConfigParser() -conf.read(r"config.txt") - -def get_conf(context: str, key: str, fallback: str = "") -> str: +def get_conf(key: str, fallback: Any = "") -> str: """ - A simple method to get a configuration parameter - from the configuration file - or just pick it on the os environment - - params : context, key + Get a conf .env param from OS or fallback on default + params : key return : value """ - value = "" - if context in conf: - value = conf.get(context, key, fallback="") - if value == "": - value = os.environ.get(key, default="") - if value == "": - return fallback - return value + return os.environ.get(key, default=fallback) -# meili configurations -MEILISEARCH_HOST = get_conf("meilisearch", "MEILISEARCH_HOST") -MEILISEARCH_MASTER_KEY = get_conf("meilisearch", "MEILISEARCH_MASTER_KEY") +# database configurations +OSS_WEBSITE_APP_DATABASE = get_conf('OSS_WEBSITE_APP_DATABASE', 'ossdb') +OSS_WEBSITE_APP_HOST = get_conf('OSS_WEBSITE_APP_HOST', "localhost") +OSS_WEBSITE_APP_PORT = get_conf('OSS_WEBSITE_APP_PORT', 5432) +OSS_WEBSITE_APP_USER = get_conf('OSS_WEBSITE_APP_USER', 'user') +OSS_WEBSITE_APP_PASSWORD = get_conf('OSS_WEBSITE_APP_PASSWORD', 'pwd') # Twitter configurations -API_KEY = get_conf("twitter", "API_KEY") -API_SECRET_KEY = get_conf("twitter", "API_SECRET_KEY") +TWITTER_API_KEY = get_conf("TWITTER_API_KEY") +TWITTER_API_SECRET_KEY = get_conf("TWITTER_API_SECRET_KEY") diff --git a/api/example.config.txt b/api/example.config.txt index 5a167dea..47cd1851 100644 --- a/api/example.config.txt +++ b/api/example.config.txt @@ -1,8 +1,10 @@ -[meilisearch] -MEILISEARCH_HOST = http://127.0.0.1:7700 -MEILISEARCH_MASTER_KEY = +[postgres] +OSS_WEBSITE_APP_DATABASE = ossdb +OSS_WEBSITE_APP_HOST = localhost +OSS_WEBSITE_APP_PASSWORD = pwd +OSS_WEBSITE_APP_PORT = 5432 +OSS_WEBSITE_APP_USER = user [twitter] API_KEY = ---------- API_SECRET_KEY = ---------- - diff --git a/api/manage.py b/api/manage.py index 85d48e4b..615532da 100644 --- a/api/manage.py +++ b/api/manage.py @@ -1,20 +1,14 @@ -from flask_script import Manager +import uvicorn -from app import blueprint from app.main import create_app -from app.main.config import app_port, app_host -app = create_app('dev') +app = create_app() -app.register_blueprint(blueprint) -app.app_context().push() - -manager = Manager(app) - -@manager.command -def run(): - app.run(host=app_host, port=app_port) - - -if __name__ == '__main__': - manager.run() +if __name__ == "__main__": + uvicorn.run( + "oss_website:app", + host="0.0.0.0", + port=8000, + log_level="info", + reload=True + ) diff --git a/api/ossdb/projects/export.sh b/api/ossdb/projects/export.sh new file mode 100644 index 00000000..ecb9c882 --- /dev/null +++ b/api/ossdb/projects/export.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +_export(){ + for i in {1..1500}; do + save_on="./data/projects_page_$i.json" + echo "Exporting projects, page $i" + + curl -Ls 'https://api.osscameroon.com/github/projects/search' -X POST \ + -H 'User-Agent: Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/112.0' \ + -H 'Accept: application/json, text/plain, */*' \ + -H 'Accept-Language: en-US,en;q=0.5' \ + -H 'Accept-Encoding: gzip, deflate, br' -H 'Content-Type: application/json' \ + -H 'Origin: https://osscameroon.com' \ + -H 'Connection: keep-alive' \ + -H 'Referer: https://osscameroon.com/' \ + -H 'Sec-Fetch-Dest: empty' \ + -H 'Sec-Fetch-Mode: cors' \ + -H 'Sec-Fetch-Site: same-site' \ + --data-raw '{"query":"","page":'$i',"count":20,"languages":[],"sort_type":""}' | jq > $save_on; + + lines=$(wc -l $save_on) + echo ">> $lines "; + sleep 5; + done +} + +_export diff --git a/api/ossdb/projects/import.sh b/api/ossdb/projects/import.sh new file mode 100644 index 00000000..7c04182e --- /dev/null +++ b/api/ossdb/projects/import.sh @@ -0,0 +1,159 @@ +#!/bin/bash + +json_file="./data/projects_page_1.json" +db_host="localhost" +db_name="ossdb" +db_user="user" +db_password="pwd" + + +_import() { + projects=$(jq -c '.result.hits[]' "$json_file") + + while IFS= read -r project; do + id=$(echo "$project" | jq -r '.id') + milestones_url=$(echo "$project" | jq -r '.milestones_url') + assignees_url=$(echo "$project" | jq -r '.assignees_url') + notifications_url=$(echo "$project" | jq -r '.notifications_url') + full_name=$(echo "$project" | jq -r '.full_name') + subscribers_url=$(echo "$project" | jq -r '.subscribers_url') + issue_events_url=$(echo "$project" | jq -r '.issue_events_url') + teams_url=$(echo "$project" | jq -r '.teams_url') + issues_url=$(echo "$project" | jq -r '.issues_url') + has_projects=$(echo "$project" | jq -r '.has_projects') + contents_url=$(echo "$project" | jq -r '.contents_url') + updated_at=$(echo "$project" | jq -r '.updated_at') + has_downloads=$(echo "$project" | jq -r '.has_downloads') + disabled=$(echo "$project" | jq -r '.disabled') + watchers=$(echo "$project" | jq -r '.watchers') + node_id=$(echo "$project" | jq -r '.node_id') + description=$(echo "$project" | jq -r '.description') + merges_url=$(echo "$project" | jq -r '.merges_url') + homepage=$(echo "$project" | jq -r '.homepage') + forks_count=$(echo "$project" | jq -r '.forks_count') + permissions_pull=$(echo "$project" | jq -r '.permissions.pull') + permissions_admin=$(echo "$project" | jq -r '.permissions.admin') + permissions_push=$(echo "$project" | jq -r '.permissions.push') + keys_url=$(echo "$project" | jq -r '.keys_url') + forks_url=$(echo "$project" | jq -r '.forks_url') + open_issues_count=$(echo "$project" | jq -r '.open_issues_count') + comments_url=$(echo "$project" | jq -r '.comments_url') + language=$(echo "$project" | jq -r '.language') + has_pages=$(echo "$project" | jq -r '.has_pages') + trees_url=$(echo "$project" | jq -r '.trees_url') + branches_url=$(echo "$project" | jq -r '.branches_url') + archived=$(echo "$project" | jq -r '.archived') + subscription_url=$(echo "$project" | jq -r '.subscription_url') + labels_url=$(echo "$project" | jq -r '.labels_url') + license_key=$(echo "$project" | jq -r '.license.key') + license_name=$(echo "$project" | jq -r '.license.name') + license_spdx_id=$(echo "$project" | jq -r '.license.spdx_id') + license_url=$(echo "$project" | jq -r '.license.url') + has_issues=$(echo "$project" | jq -r '.has_issues') + git_refs_url=$(echo "$project" | jq -r '.git_refs_url') + forks=$(echo "$project" | jq -r '.forks') + issue_comment_url=$(echo "$project" | jq -r '.issue_comment_url') + size=$(echo "$project" | jq -r '.size') + languages_url=$(echo "$project" | jq -r '.languages_url') + blobs_url=$(echo "$project" | jq -r '.blobs_url') + html_url=$(echo "$project" | jq -r '.html_url') + open_issues=$(echo "$project" | jq -r '.open_issues') + ssh_url=$(echo "$project" | jq -r '.ssh_url') + contributors_url=$(echo "$project" | jq -r '.contributors_url') + has_wiki=$(echo "$project" | jq -r '.has_wiki') + releases_url=$(echo "$project" | jq -r '.releases_url') + git_commits_url=$(echo "$project" | jq -r '.git_commits_url') + owner_html_url=$(echo "$project" | jq -r '.owner.html_url') + owner_gravatar_id=$(echo "$project" | jq -r '.owner.gravatar_id') + owner_received_events_url=$(echo "$project" | jq -r '.owner.received_events_url') + owner_repos_url=$(echo "$project" | jq -r '.owner.repos_url') + owner_site_admin=$(echo "$project" | jq -r '.owner.site_admin') + owner_node_id=$(echo "$project" | jq -r '.owner.node_id') + owner_avatar_url=$(echo "$project" | jq -r '.owner.avatar_url') + owner_gists_url=$(echo "$project" | jq -r '.owner.gists_url') + owner_subscriptions_url=$(echo "$project" | jq -r '.owner.subscriptions_url') + owner_login=$(echo "$project" | jq -r '.owner.login') + owner_followers_url=$(echo "$project" | jq -r '.owner.followers_url') + owner_url=$(echo "$project" | jq -r '.owner.url') + owner_following_url=$(echo "$project" | jq -r '.owner.following_url') + owner_type=$(echo "$project" | jq -r '.owner.type') + owner_starred_url=$(echo "$project" | jq -r '.owner.starred_url') + owner_organizations_url=$(echo "$project" | jq -r '.owner.organizations_url') + owner_events_url=$(echo "$project" | jq -r '.owner.events_url') + owner_id=$(echo "$project" | jq -r '.owner.id') + default_branch=$(echo "$project" | jq -r '.default_branch') + fork=$(echo "$project" | jq -r '.fork') + compare_url=$(echo "$project" | jq -r '.compare_url') + mirror_url=$(echo "$project" | jq -r '.mirror_url') + commits_url=$(echo "$project" | jq -r '.commits_url') + git_tags_url=$(echo "$project" | jq -r '.git_tags_url') + archive_url=$(echo "$project" | jq -r '.archive_url') + clone_url=$(echo "$project" | jq -r '.clone_url') + svn_url=$(echo "$project" | jq -r '.svn_url') + tags_url=$(echo "$project" | jq -r '.tags_url') + events_url=$(echo "$project" | jq -r '.events_url') + statuses_url=$(echo "$project" | jq -r '.statuses_url') + project_url=$(echo "$project" | jq -r '.url') + stargazers_url=$(echo "$project" | jq -r '.stargazers_url') + downloads_url=$(echo "$project" | jq -r '.downloads_url') + private=$(echo "$project" | jq -r '.private') + stargazers_count=$(echo "$project" | jq -r '.stargazers_count') + deployments_url=$(echo "$project" | jq -r '.deployments_url') + git_url=$(echo "$project" | jq -r '.git_url') + collaborators_url=$(echo "$project" | jq -r '.collaborators_url') + created_at=$(echo "$project" | jq -r '.created_at') + name=$(echo "$project" | jq -r '.name') + watchers_count=$(echo "$project" | jq -r '.watchers_count') + pushed_at=$(echo "$project" | jq -r '.pushed_at') + hooks_url=$(echo "$project" | jq -r '.hooks_url') + pulls_url=$(echo "$project" | jq -r '.pulls_url') + allow_forking=$(echo "$project" | jq -r '.allow_forking') + visibility=$(echo "$project" | jq -r '.visibility') + is_template=$(echo "$project" | jq -r '.is_template') + topics=$(echo "$project" | jq -c -r '.topics[]') + + # Insert the data into the PostgreSQL database + psql -h "$db_host" -d "$db_name" -U "$db_user" -c " + INSERT INTO projects ( + id, milestones_url, assignees_url, notifications_url, + full_name, subscribers_url, issue_events_url, teams_url, issues_url, has_projects, + contents_url, updated_at, has_downloads, disabled, watchers, node_id, description, + merges_url, homepage, forks_count, permissions_pull, permissions_admin, permissions_push, + keys_url, forks_url, open_issues_count, comments_url, language, has_pages, trees_url, + branches_url, archived, subscription_url, labels_url, license_key, license_name, + license_spdx_id, license_url, has_issues, git_refs_url, forks, issue_comment_url, + size, languages_url, blobs_url, html_url, open_issues, ssh_url, contributors_url, + has_wiki, releases_url, git_commits_url, owner_html_url, owner_gravatar_id, + owner_received_events_url, owner_repos_url, owner_site_admin, owner_node_id, + owner_avatar_url, owner_gists_url, owner_subscriptions_url, owner_login, + owner_followers_url, owner_url, owner_following_url, owner_type, owner_starred_url, + owner_organizations_url, owner_events_url, owner_id, default_branch, fork, + compare_url, mirror_url, commits_url, git_tags_url, archive_url, clone_url, + svn_url, tags_url, events_url, statuses_url, project_url, stargazers_url, + downloads_url, private, stargazers_count, deployments_url, git_url, + collaborators_url, created_at, name, watchers_count, pushed_at, + hooks_url, pulls_url, allow_forking, visibility, is_template + ) + VALUES ( + '$id', '$milestones_url', '$assignees_url', '$notifications_url', '$full_name', '$subscribers_url', + '$issue_events_url', '$teams_url', '$issues_url', '$has_projects', '$contents_url', '$updated_at', + '$has_downloads', '$disabled', '$watchers', '$node_id', '$description', '$merges_url', '$homepage', + '$forks_count', '$permissions_pull', '$permissions_admin', '$permissions_push', '$keys_url', '$forks_url', + '$open_issues_count', '$comments_url', '$language', '$has_pages', '$trees_url', '$branches_url', + '$archived', '$subscription_url', '$labels_url', '$license_key', '$license_name', '$license_spdx_id', + '$license_url', '$has_issues', '$git_refs_url', '$forks', '$issue_comment_url', '$size', '$languages_url', + '$blobs_url', '$html_url', '$open_issues', '$ssh_url', '$contributors_url', '$has_wiki', '$releases_url', + '$git_commits_url', '$owner_html_url', '$owner_gravatar_id', '$owner_received_events_url', + '$owner_repos_url', '$owner_site_admin', '$owner_node_id', '$owner_avatar_url', '$owner_gists_url', + '$owner_subscriptions_url', '$owner_login', '$owner_followers_url', '$owner_url', '$owner_following_url', + '$owner_type', '$owner_starred_url', '$owner_organizations_url', '$owner_events_url', '$owner_id', + '$default_branch', '$fork', '$compare_url', '$mirror_url', '$commits_url', '$git_tags_url', '$archive_url', + '$clone_url', '$svn_url', '$tags_url', '$events_url', '$statuses_url', '$project_url', '$stargazers_url', + '$downloads_url', '$private', '$stargazers_count', '$deployments_url', '$git_url', '$collaborators_url', + '$created_at', '$name', '$watchers_count', '$pushed_at', '$hooks_url', '$pulls_url', '$allow_forking', + '$visibility', '$is_template' + ); + done <<< "$projects" +} + +_import diff --git a/api/ossdb/users/export.sh b/api/ossdb/users/export.sh new file mode 100644 index 00000000..5d9e15af --- /dev/null +++ b/api/ossdb/users/export.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +_export(){ + for i in {1..1500}; do + save_on="./data/users_page_$i.json" + echo "Exporting users, page $i" + + curl -Ls 'https://api.osscameroon.com/github/users/search' -X POST \ + -H 'User-Agent: Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/112.0' \ + -H 'Accept: application/json, text/plain, */*' \ + -H 'Accept-Language: en-US,en;q=0.5' \ + -H 'Accept-Encoding: gzip, deflate, br' -H 'Content-Type: application/json' \ + -H 'Origin: https://osscameroon.com' \ + -H 'Connection: keep-alive' \ + -H 'Referer: https://osscameroon.com/' \ + -H 'Sec-Fetch-Dest: empty' \ + -H 'Sec-Fetch-Mode: cors' \ + -H 'Sec-Fetch-Site: same-site' \ + --data-raw '{"count":21,"page": '$i'}' | jq > $save_on; + + lines=$(wc -l $save_on) + echo ">> $lines "; + sleep 5; + done +} + +_export diff --git a/api/ossdb/users/import.sh b/api/ossdb/users/import.sh new file mode 100644 index 00000000..ae098088 --- /dev/null +++ b/api/ossdb/users/import.sh @@ -0,0 +1,67 @@ +#!/bin/bash + +json_file="user_page_1.json" +db_host="localhost" +db_name="ossdb" +db_user="user" +db_password="pwd" + + +_import() { + users=$(jq -c '.result.hits[]' "$json_file") + + while IFS= read -r user; do + id=$(echo "$user" | jq -r '.id') + avatar_url=$(echo "$user" | jq -r '.avatar_url') + name=$(echo "$user" | jq -r '.name') + following=$(echo "$user" | jq -r '.following') + bio=$(echo "$user" | jq -r '.bio') + node_id=$(echo "$user" | jq -r '.node_id') + following_url=$(echo "$user" | jq -r '.following_url') + url=$(echo "$user" | jq -r '.url') + type=$(echo "$user" | jq -r '.type') + starred_url=$(echo "$user" | jq -r '.starred_url') + followers=$(echo "$user" | jq -r '.followers') + site_admin=$(echo "$user" | jq -r '.site_admin') + location=$(echo "$user" | jq -r '.location') + twitter_username=$(echo "$user" | jq -r '.twitter_username') + organizations_url=$(echo "$user" | jq -r '.organizations_url') + public_gists=$(echo "$user" | jq -r '.public_gists') + repos_url=$(echo "$user" | jq -r '.repos_url') + email=$(echo "$user" | jq -r '.email') + company=$(echo "$user" | jq -r '.company') + received_events_url=$(echo "$user" | jq -r '.received_events_url') + html_url=$(echo "$user" | jq -r '.html_url') + public_repos=$(echo "$user" | jq -r '.public_repos') + login=$(echo "$user" | jq -r '.login') + subscriptions_url=$(echo "$user" | jq -r '.subscriptions_url') + blog=$(echo "$user" | jq -r '.blog') + updated_at=$(echo "$user" | jq -r '.updated_at') + gravatar_id=$(echo "$user" | jq -r '.gravatar_id') + gists_url=$(echo "$user" | jq -r '.gists_url') + created_at=$(echo "$user" | jq -r '.created_at') + events_url=$(echo "$user" | jq -r '.events_url') + hireable=$(echo "$user" | jq -r '.hireable') + followers_url=$(echo "$user" | jq -r '.followers_url') + + # On insert les datas avec psql dans notre postgres + psql -h "$db_host" -d "$db_name" -U "$db_user" -c " + INSERT INTO users ( + id, avatar_url, name, following, bio, node_id, following_url, url, type, starred_url, + followers, site_admin, location, twitter_username, organizations_url, public_gists, + repos_url, email, company, received_events_url, html_url, public_repos, login, + subscriptions_url, blog, updated_at, gravatar_id, gists_url, created_at, events_url, + hireable, followers_url + ) + VALUES ( + '$id', '$avatar_url', '$name', '$following', '$bio', '$node_id', '$following_url', '$url', '$type', '$starred_url', + '$followers', '$site_admin', '$location', '$twitter_username', '$organizations_url', '$public_gists', + '$repos_url', '$email', '$company', '$received_events_url', '$html_url', '$public_repos', '$login', + '$subscriptions_url', '$blog', '$updated_at', '$gravatar_id', '$gists_url', '$created_at', '$events_url', + '$hireable', '$followers_url' + ); + " + done <<< "$users" +} + +_import diff --git a/api/poetry.lock b/api/poetry.lock new file mode 100644 index 00000000..7a222d38 --- /dev/null +++ b/api/poetry.lock @@ -0,0 +1,1251 @@ +# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. + +[[package]] +name = "anyio" +version = "3.7.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "anyio-3.7.0-py3-none-any.whl", hash = "sha256:eddca883c4175f14df8aedce21054bfca3adb70ffe76a9f607aef9d7fa2ea7f0"}, + {file = "anyio-3.7.0.tar.gz", hash = "sha256:275d9973793619a5374e1c89a4f4ad3f4b0a5510a2b5b939444bee8f4c4d37ce"}, +] + +[package.dependencies] +exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx (>=6.1.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme", "sphinxcontrib-jquery"] +test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (<0.22)"] + +[[package]] +name = "asyncpg" +version = "0.27.0" +description = "An asyncio PostgreSQL driver" +category = "main" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "asyncpg-0.27.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fca608d199ffed4903dce1bcd97ad0fe8260f405c1c225bdf0002709132171c2"}, + {file = "asyncpg-0.27.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:20b596d8d074f6f695c13ffb8646d0b6bb1ab570ba7b0cfd349b921ff03cfc1e"}, + {file = "asyncpg-0.27.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a6206210c869ebd3f4eb9e89bea132aefb56ff3d1b7dd7e26b102b17e27bbb1"}, + {file = "asyncpg-0.27.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a7a94c03386bb95456b12c66026b3a87d1b965f0f1e5733c36e7229f8f137747"}, + {file = "asyncpg-0.27.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bfc3980b4ba6f97138b04f0d32e8af21d6c9fa1f8e6e140c07d15690a0a99279"}, + {file = "asyncpg-0.27.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9654085f2b22f66952124de13a8071b54453ff972c25c59b5ce1173a4283ffd9"}, + {file = "asyncpg-0.27.0-cp310-cp310-win32.whl", hash = "sha256:879c29a75969eb2722f94443752f4720d560d1e748474de54ae8dd230bc4956b"}, + {file = "asyncpg-0.27.0-cp310-cp310-win_amd64.whl", hash = "sha256:ab0f21c4818d46a60ca789ebc92327d6d874d3b7ccff3963f7af0a21dc6cff52"}, + {file = "asyncpg-0.27.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:18f77e8e71e826ba2d0c3ba6764930776719ae2b225ca07e014590545928b576"}, + {file = "asyncpg-0.27.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c2232d4625c558f2aa001942cac1d7952aa9f0dbfc212f63bc754277769e1ef2"}, + {file = "asyncpg-0.27.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9a3a4ff43702d39e3c97a8786314123d314e0f0e4dabc8367db5b665c93914de"}, + {file = "asyncpg-0.27.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccddb9419ab4e1c48742457d0c0362dbdaeb9b28e6875115abfe319b29ee225d"}, + {file = "asyncpg-0.27.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:768e0e7c2898d40b16d4ef7a0b44e8150db3dd8995b4652aa1fe2902e92c7df8"}, + {file = "asyncpg-0.27.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:609054a1f47292a905582a1cfcca51a6f3f30ab9d822448693e66fdddde27920"}, + {file = "asyncpg-0.27.0-cp311-cp311-win32.whl", hash = "sha256:8113e17cfe236dc2277ec844ba9b3d5312f61bd2fdae6d3ed1c1cdd75f6cf2d8"}, + {file = "asyncpg-0.27.0-cp311-cp311-win_amd64.whl", hash = "sha256:bb71211414dd1eeb8d31ec529fe77cff04bf53efc783a5f6f0a32d84923f45cf"}, + {file = "asyncpg-0.27.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4750f5cf49ed48a6e49c6e5aed390eee367694636c2dcfaf4a273ca832c5c43c"}, + {file = "asyncpg-0.27.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:eca01eb112a39d31cc4abb93a5aef2a81514c23f70956729f42fb83b11b3483f"}, + {file = "asyncpg-0.27.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5710cb0937f696ce303f5eed6d272e3f057339bb4139378ccecafa9ee923a71c"}, + {file = "asyncpg-0.27.0-cp37-cp37m-win_amd64.whl", hash = "sha256:71cca80a056ebe19ec74b7117b09e650990c3ca535ac1c35234a96f65604192f"}, + {file = "asyncpg-0.27.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4bb366ae34af5b5cabc3ac6a5347dfb6013af38c68af8452f27968d49085ecc0"}, + {file = "asyncpg-0.27.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:16ba8ec2e85d586b4a12bcd03e8d29e3d99e832764d6a1d0b8c27dbbe4a2569d"}, + {file = "asyncpg-0.27.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d20dea7b83651d93b1eb2f353511fe7fd554752844523f17ad30115d8b9c8cd6"}, + {file = "asyncpg-0.27.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e56ac8a8237ad4adec97c0cd4728596885f908053ab725e22900b5902e7f8e69"}, + {file = "asyncpg-0.27.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bf21ebf023ec67335258e0f3d3ad7b91bb9507985ba2b2206346de488267cad0"}, + {file = "asyncpg-0.27.0-cp38-cp38-win32.whl", hash = "sha256:69aa1b443a182b13a17ff926ed6627af2d98f62f2fe5890583270cc4073f63bf"}, + {file = "asyncpg-0.27.0-cp38-cp38-win_amd64.whl", hash = "sha256:62932f29cf2433988fcd799770ec64b374a3691e7902ecf85da14d5e0854d1ea"}, + {file = "asyncpg-0.27.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fddcacf695581a8d856654bc4c8cfb73d5c9df26d5f55201722d3e6a699e9629"}, + {file = "asyncpg-0.27.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7d8585707ecc6661d07367d444bbaa846b4e095d84451340da8df55a3757e152"}, + {file = "asyncpg-0.27.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:975a320baf7020339a67315284a4d3bf7460e664e484672bd3e71dbd881bc692"}, + {file = "asyncpg-0.27.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2232ebae9796d4600a7819fc383da78ab51b32a092795f4555575fc934c1c89d"}, + {file = "asyncpg-0.27.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:88b62164738239f62f4af92567b846a8ef7cf8abf53eddd83650603de4d52163"}, + {file = "asyncpg-0.27.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:eb4b2fdf88af4fb1cc569781a8f933d2a73ee82cd720e0cb4edabbaecf2a905b"}, + {file = "asyncpg-0.27.0-cp39-cp39-win32.whl", hash = "sha256:8934577e1ed13f7d2d9cea3cc016cc6f95c19faedea2c2b56a6f94f257cea672"}, + {file = "asyncpg-0.27.0-cp39-cp39-win_amd64.whl", hash = "sha256:1b6499de06fe035cf2fa932ec5617ed3f37d4ebbf663b655922e105a484a6af9"}, + {file = "asyncpg-0.27.0.tar.gz", hash = "sha256:720986d9a4705dd8a40fdf172036f5ae787225036a7eb46e704c45aa8f62c054"}, +] + +[package.extras] +dev = ["Cython (>=0.29.24,<0.30.0)", "Sphinx (>=4.1.2,<4.2.0)", "flake8 (>=5.0.4,<5.1.0)", "pytest (>=6.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)", "uvloop (>=0.15.3)"] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["flake8 (>=5.0.4,<5.1.0)", "uvloop (>=0.15.3)"] + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.1.0" +description = "Classes Without Boilerplate" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] + +[[package]] +name = "certifi" +version = "2023.5.7" +description = "Python package for providing Mozilla's CA Bundle." +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, + {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.1.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "dev" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, + {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, +] + +[[package]] +name = "click" +version = "8.1.3" +description = "Composable command line interface toolkit" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "configparser" +version = "5.3.0" +description = "Updated configparser from stdlib for earlier Pythons." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "configparser-5.3.0-py3-none-any.whl", hash = "sha256:b065779fd93c6bf4cee42202fa4351b4bb842e96a3fb469440e484517a49b9fa"}, + {file = "configparser-5.3.0.tar.gz", hash = "sha256:8be267824b541c09b08db124917f48ab525a6c3e837011f3130781a224c57090"}, +] + +[package.extras] +docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] +testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "types-backports"] + +[[package]] +name = "exceptiongroup" +version = "1.1.1" +description = "Backport of PEP 654 (exception groups)" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, + {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fastapi" +version = "0.95.2" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "fastapi-0.95.2-py3-none-any.whl", hash = "sha256:d374dbc4ef2ad9b803899bd3360d34c534adc574546e25314ab72c0c4411749f"}, + {file = "fastapi-0.95.2.tar.gz", hash = "sha256:4d9d3e8c71c73f11874bcf5e33626258d143252e329a01002f767306c64fb982"}, +] + +[package.dependencies] +pydantic = ">=1.6.2,<1.7 || >1.7,<1.7.1 || >1.7.1,<1.7.2 || >1.7.2,<1.7.3 || >1.7.3,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0" +starlette = ">=0.27.0,<0.28.0" + +[package.extras] +all = ["email-validator (>=1.1.1)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +dev = ["pre-commit (>=2.17.0,<3.0.0)", "ruff (==0.0.138)", "uvicorn[standard] (>=0.12.0,<0.21.0)"] +doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "typer-cli (>=0.0.13,<0.0.14)", "typer[all] (>=0.6.1,<0.8.0)"] +test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==23.1.0)", "coverage[toml] (>=6.5.0,<8.0)", "databases[sqlite] (>=0.3.2,<0.7.0)", "email-validator (>=1.1.1,<2.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.23.0,<0.24.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.982)", "orjson (>=3.2.1,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest (>=7.1.3,<8.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.7)", "pyyaml (>=5.3.1,<7.0.0)", "ruff (==0.0.138)", "sqlalchemy (>=1.3.18,<1.4.43)", "types-orjson (==3.6.2)", "types-ujson (==5.7.0.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"] + +[[package]] +name = "greenlet" +version = "2.0.2" +description = "Lightweight in-process concurrent programming" +category = "main" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" +files = [ + {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"}, + {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"}, + {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, + {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, + {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, + {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"}, + {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"}, + {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, + {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, + {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, + {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"}, + {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"}, + {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"}, + {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"}, + {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"}, + {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"}, + {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"}, + {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"}, + {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"}, + {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"}, + {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"}, + {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"}, + {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"}, + {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"}, + {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"}, + {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, + {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, + {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"}, + {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"}, + {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, + {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, + {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, + {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"}, + {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"}, + {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"}, + {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"}, + {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"}, + {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"}, +] + +[package.extras] +docs = ["Sphinx", "docutils (<0.18)"] +test = ["objgraph", "psutil"] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httptools" +version = "0.5.0" +description = "A collection of framework independent HTTP protocol utils." +category = "main" +optional = false +python-versions = ">=3.5.0" +files = [ + {file = "httptools-0.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8f470c79061599a126d74385623ff4744c4e0f4a0997a353a44923c0b561ee51"}, + {file = "httptools-0.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e90491a4d77d0cb82e0e7a9cb35d86284c677402e4ce7ba6b448ccc7325c5421"}, + {file = "httptools-0.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1d2357f791b12d86faced7b5736dea9ef4f5ecdc6c3f253e445ee82da579449"}, + {file = "httptools-0.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f90cd6fd97c9a1b7fe9215e60c3bd97336742a0857f00a4cb31547bc22560c2"}, + {file = "httptools-0.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5230a99e724a1bdbbf236a1b58d6e8504b912b0552721c7c6b8570925ee0ccde"}, + {file = "httptools-0.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a47a34f6015dd52c9eb629c0f5a8a5193e47bf2a12d9a3194d231eaf1bc451a"}, + {file = "httptools-0.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:24bb4bb8ac3882f90aa95403a1cb48465de877e2d5298ad6ddcfdebec060787d"}, + {file = "httptools-0.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e67d4f8734f8054d2c4858570cc4b233bf753f56e85217de4dfb2495904cf02e"}, + {file = "httptools-0.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7e5eefc58d20e4c2da82c78d91b2906f1a947ef42bd668db05f4ab4201a99f49"}, + {file = "httptools-0.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0297822cea9f90a38df29f48e40b42ac3d48a28637368f3ec6d15eebefd182f9"}, + {file = "httptools-0.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:557be7fbf2bfa4a2ec65192c254e151684545ebab45eca5d50477d562c40f986"}, + {file = "httptools-0.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:54465401dbbec9a6a42cf737627fb0f014d50dc7365a6b6cd57753f151a86ff0"}, + {file = "httptools-0.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4d9ebac23d2de960726ce45f49d70eb5466725c0087a078866043dad115f850f"}, + {file = "httptools-0.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:e8a34e4c0ab7b1ca17b8763613783e2458e77938092c18ac919420ab8655c8c1"}, + {file = "httptools-0.5.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f659d7a48401158c59933904040085c200b4be631cb5f23a7d561fbae593ec1f"}, + {file = "httptools-0.5.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef1616b3ba965cd68e6f759eeb5d34fbf596a79e84215eeceebf34ba3f61fdc7"}, + {file = "httptools-0.5.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3625a55886257755cb15194efbf209584754e31d336e09e2ffe0685a76cb4b60"}, + {file = "httptools-0.5.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:72ad589ba5e4a87e1d404cc1cb1b5780bfcb16e2aec957b88ce15fe879cc08ca"}, + {file = "httptools-0.5.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:850fec36c48df5a790aa735417dca8ce7d4b48d59b3ebd6f83e88a8125cde324"}, + {file = "httptools-0.5.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f222e1e9d3f13b68ff8a835574eda02e67277d51631d69d7cf7f8e07df678c86"}, + {file = "httptools-0.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3cb8acf8f951363b617a8420768a9f249099b92e703c052f9a51b66342eea89b"}, + {file = "httptools-0.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:550059885dc9c19a072ca6d6735739d879be3b5959ec218ba3e013fd2255a11b"}, + {file = "httptools-0.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a04fe458a4597aa559b79c7f48fe3dceabef0f69f562daf5c5e926b153817281"}, + {file = "httptools-0.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d0c1044bce274ec6711f0770fd2d5544fe392591d204c68328e60a46f88843b"}, + {file = "httptools-0.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c6eeefd4435055a8ebb6c5cc36111b8591c192c56a95b45fe2af22d9881eee25"}, + {file = "httptools-0.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5b65be160adcd9de7a7e6413a4966665756e263f0d5ddeffde277ffeee0576a5"}, + {file = "httptools-0.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fe9c766a0c35b7e3d6b6939393c8dfdd5da3ac5dec7f971ec9134f284c6c36d6"}, + {file = "httptools-0.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:85b392aba273566c3d5596a0a490978c085b79700814fb22bfd537d381dd230c"}, + {file = "httptools-0.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5e3088f4ed33947e16fd865b8200f9cfae1144f41b64a8cf19b599508e096bc"}, + {file = "httptools-0.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c2a56b6aad7cc8f5551d8e04ff5a319d203f9d870398b94702300de50190f63"}, + {file = "httptools-0.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9b571b281a19762adb3f48a7731f6842f920fa71108aff9be49888320ac3e24d"}, + {file = "httptools-0.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa47ffcf70ba6f7848349b8a6f9b481ee0f7637931d91a9860a1838bfc586901"}, + {file = "httptools-0.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:bede7ee075e54b9a5bde695b4fc8f569f30185891796b2e4e09e2226801d09bd"}, + {file = "httptools-0.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:64eba6f168803a7469866a9c9b5263a7463fa8b7a25b35e547492aa7322036b6"}, + {file = "httptools-0.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4b098e4bb1174096a93f48f6193e7d9aa7071506a5877da09a783509ca5fff42"}, + {file = "httptools-0.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9423a2de923820c7e82e18980b937893f4aa8251c43684fa1772e341f6e06887"}, + {file = "httptools-0.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca1b7becf7d9d3ccdbb2f038f665c0f4857e08e1d8481cbcc1a86a0afcfb62b2"}, + {file = "httptools-0.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:50d4613025f15f4b11f1c54bbed4761c0020f7f921b95143ad6d58c151198142"}, + {file = "httptools-0.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8ffce9d81c825ac1deaa13bc9694c0562e2840a48ba21cfc9f3b4c922c16f372"}, + {file = "httptools-0.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:1af91b3650ce518d226466f30bbba5b6376dbd3ddb1b2be8b0658c6799dd450b"}, + {file = "httptools-0.5.0.tar.gz", hash = "sha256:295874861c173f9101960bba332429bb77ed4dcd8cdf5cee9922eb00e4f6bc09"}, +] + +[package.extras] +test = ["Cython (>=0.29.24,<0.30.0)"] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isort" +version = "5.12.0" +description = "A Python utility / library to sort Python imports." +category = "dev" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, +] + +[package.extras] +colors = ["colorama (>=0.4.3)"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] +plugins = ["setuptools"] +requirements-deprecated-finder = ["pip-api", "pipreqs"] + +[[package]] +name = "libcst" +version = "0.4.9" +description = "A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7, 3.8, 3.9, and 3.10 programs." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "libcst-0.4.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4f9e42085c403e22201e5c41e707ef73e4ea910ad9fc67983ceee2368097f54e"}, + {file = "libcst-0.4.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1266530bf840cc40633a04feb578bb4cac1aa3aea058cc3729e24eab09a8e996"}, + {file = "libcst-0.4.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9679177391ccb9b0cdde3185c22bf366cb672457c4b7f4031fcb3b5e739fbd6"}, + {file = "libcst-0.4.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d67bc87e0d8db9434f2ea063734938a320f541f4c6da1074001e372f840f385d"}, + {file = "libcst-0.4.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e316da5a126f2a9e1d7680f95f907b575f082a35e2f8bd5620c59b2aaaebfe0a"}, + {file = "libcst-0.4.9-cp310-cp310-win_amd64.whl", hash = "sha256:7415569ab998a85b0fc9af3a204611ea7fadb2d719a12532c448f8fc98f5aca4"}, + {file = "libcst-0.4.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:15ded11ff7f4572f91635e02b519ae959f782689fdb4445bbebb7a3cc5c71d75"}, + {file = "libcst-0.4.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b266867b712a120fad93983de432ddb2ccb062eb5fd2bea748c9a94cb200c36"}, + {file = "libcst-0.4.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:045b3b0b06413cdae6e9751b5f417f789ffa410f2cb2815e3e0e0ea6bef10ec0"}, + {file = "libcst-0.4.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e799add8fba4976628b9c1a6768d73178bf898f0ed1bd1322930c2d3db9063ba"}, + {file = "libcst-0.4.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10479371d04ee8dc978c889c1774bbf6a83df88fa055fcb0159a606f6679c565"}, + {file = "libcst-0.4.9-cp311-cp311-win_amd64.whl", hash = "sha256:7a98286cbbfa90a42d376900c875161ad02a5a2a6b7c94c0f7afd9075e329ce4"}, + {file = "libcst-0.4.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:400166fc4efb9aa06ce44498d443aa78519082695b1894202dd73cd507d2d712"}, + {file = "libcst-0.4.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46123863fba35cc84f7b54dd68826419cabfd9504d8a101c7fe3313ea03776f9"}, + {file = "libcst-0.4.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27be8db54c0e5fe440021a771a38b81a7dbc23cd630eb8b0e9828b7717f9b702"}, + {file = "libcst-0.4.9-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:132bec627b064bd567e7e4cd6c89524d02842151eb0d8f5f3f7ffd2579ec1b09"}, + {file = "libcst-0.4.9-cp37-cp37m-win_amd64.whl", hash = "sha256:596860090aeed3ee6ad1e59c35c6c4110a57e4e896abf51b91cae003ec720a11"}, + {file = "libcst-0.4.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f4487608258109f774300466d4ca97353df29ae6ac23d1502e13e5509423c9d5"}, + {file = "libcst-0.4.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aa53993e9a2853efb3ed3605da39f2e7125df6430f613eb67ef886c1ce4f94b5"}, + {file = "libcst-0.4.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6ce794483d4c605ef0f5b199a49fb6996f9586ca938b7bfef213bd13858d7ab"}, + {file = "libcst-0.4.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:786e562b54bbcd17a060d1244deeef466b7ee07fe544074c252c4a169e38f1ee"}, + {file = "libcst-0.4.9-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:794250d2359edd518fb698e5d21c38a5bdfc5e4a75d0407b4c19818271ce6742"}, + {file = "libcst-0.4.9-cp38-cp38-win_amd64.whl", hash = "sha256:76491f67431318c3145442e97dddcead7075b074c59eac51be7cc9e3fffec6ee"}, + {file = "libcst-0.4.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3cf48d7aec6dc54b02aec0b1bb413c5bb3b02d852fd6facf1f05c7213e61a176"}, + {file = "libcst-0.4.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9b3348c6b7711a5235b133bd8e11d22e903c388db42485b8ceb5f2aa0fae9b9f"}, + {file = "libcst-0.4.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e33b66762efaa014c38819efae5d8f726dd823e32d5d691035484411d2a2a69"}, + {file = "libcst-0.4.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1350d375d3fb9b20a6cf10c09b2964baca9be753a033dde7c1aced49d8e58387"}, + {file = "libcst-0.4.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3822056dc13326082362db35b3f649e0f4a97e36ddb4e487441da8e0fb9db7b3"}, + {file = "libcst-0.4.9-cp39-cp39-win_amd64.whl", hash = "sha256:183636141b839aa35b639e100883813744523bc7c12528906621121731b28443"}, + {file = "libcst-0.4.9.tar.gz", hash = "sha256:01786c403348f76f274dbaf3888ae237ffb73e6ed6973e65eba5c1fc389861dd"}, +] + +[package.dependencies] +pyyaml = ">=5.2" +typing-extensions = ">=3.7.4.2" +typing-inspect = ">=0.4.0" + +[package.extras] +dev = ["Sphinx (>=5.1.1)", "black (==22.10.0)", "coverage (>=4.5.4)", "fixit (==0.1.1)", "flake8 (>=3.7.8,<5)", "hypothesis (>=4.36.0)", "hypothesmith (>=0.0.4)", "jinja2 (==3.1.2)", "jupyter (>=1.0.0)", "maturin (>=0.8.3,<0.14)", "nbsphinx (>=0.4.2)", "prompt-toolkit (>=2.0.9)", "pyre-check (==0.9.9)", "setuptools-rust (>=1.5.2)", "setuptools-scm (>=6.0.1)", "slotscheck (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "ufmt (==2.0.1)", "usort (==1.0.5)"] + +[[package]] +name = "mypy" +version = "1.3.0" +description = "Optional static typing for Python" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c1eb485cea53f4f5284e5baf92902cd0088b24984f4209e25981cc359d64448d"}, + {file = "mypy-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c99c3ecf223cf2952638da9cd82793d8f3c0c5fa8b6ae2b2d9ed1e1ff51ba85"}, + {file = "mypy-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:550a8b3a19bb6589679a7c3c31f64312e7ff482a816c96e0cecec9ad3a7564dd"}, + {file = "mypy-1.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cbc07246253b9e3d7d74c9ff948cd0fd7a71afcc2b77c7f0a59c26e9395cb152"}, + {file = "mypy-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:a22435632710a4fcf8acf86cbd0d69f68ac389a3892cb23fbad176d1cddaf228"}, + {file = "mypy-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6e33bb8b2613614a33dff70565f4c803f889ebd2f859466e42b46e1df76018dd"}, + {file = "mypy-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7d23370d2a6b7a71dc65d1266f9a34e4cde9e8e21511322415db4b26f46f6b8c"}, + {file = "mypy-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:658fe7b674769a0770d4b26cb4d6f005e88a442fe82446f020be8e5f5efb2fae"}, + {file = "mypy-1.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e42d29e324cdda61daaec2336c42512e59c7c375340bd202efa1fe0f7b8f8ca"}, + {file = "mypy-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:d0b6c62206e04061e27009481cb0ec966f7d6172b5b936f3ead3d74f29fe3dcf"}, + {file = "mypy-1.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:76ec771e2342f1b558c36d49900dfe81d140361dd0d2df6cd71b3db1be155409"}, + {file = "mypy-1.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc95f8386314272bbc817026f8ce8f4f0d2ef7ae44f947c4664efac9adec929"}, + {file = "mypy-1.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:faff86aa10c1aa4a10e1a301de160f3d8fc8703b88c7e98de46b531ff1276a9a"}, + {file = "mypy-1.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:8c5979d0deb27e0f4479bee18ea0f83732a893e81b78e62e2dda3e7e518c92ee"}, + {file = "mypy-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c5d2cc54175bab47011b09688b418db71403aefad07cbcd62d44010543fc143f"}, + {file = "mypy-1.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:87df44954c31d86df96c8bd6e80dfcd773473e877ac6176a8e29898bfb3501cb"}, + {file = "mypy-1.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:473117e310febe632ddf10e745a355714e771ffe534f06db40702775056614c4"}, + {file = "mypy-1.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:74bc9b6e0e79808bf8678d7678b2ae3736ea72d56eede3820bd3849823e7f305"}, + {file = "mypy-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:44797d031a41516fcf5cbfa652265bb994e53e51994c1bd649ffcd0c3a7eccbf"}, + {file = "mypy-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ddae0f39ca146972ff6bb4399f3b2943884a774b8771ea0a8f50e971f5ea5ba8"}, + {file = "mypy-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c4c42c60a8103ead4c1c060ac3cdd3ff01e18fddce6f1016e08939647a0e703"}, + {file = "mypy-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e86c2c6852f62f8f2b24cb7a613ebe8e0c7dc1402c61d36a609174f63e0ff017"}, + {file = "mypy-1.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f9dca1e257d4cc129517779226753dbefb4f2266c4eaad610fc15c6a7e14283e"}, + {file = "mypy-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:95d8d31a7713510685b05fbb18d6ac287a56c8f6554d88c19e73f724a445448a"}, + {file = "mypy-1.3.0-py3-none-any.whl", hash = "sha256:a8763e72d5d9574d45ce5881962bc8e9046bf7b375b0abf031f3e6811732a897"}, + {file = "mypy-1.3.0.tar.gz", hash = "sha256:e1f4d16e296f5135624b34e8fb741eb0eadedca90862405b1f1fde2040b9bd11"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=3.10" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +python2 = ["typed-ast (>=1.4.0,<2)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +category = "dev" +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "oauthlib" +version = "3.2.2" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, + {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, +] + +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + +[[package]] +name = "packaging" +version = "23.1" +description = "Core utilities for Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, + {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, +] + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pydantic" +version = "1.10.8" +description = "Data validation and settings management using python type hints" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1243d28e9b05003a89d72e7915fdb26ffd1d39bdd39b00b7dbe4afae4b557f9d"}, + {file = "pydantic-1.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0ab53b609c11dfc0c060d94335993cc2b95b2150e25583bec37a49b2d6c6c3f"}, + {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9613fadad06b4f3bc5db2653ce2f22e0de84a7c6c293909b48f6ed37b83c61f"}, + {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df7800cb1984d8f6e249351139667a8c50a379009271ee6236138a22a0c0f319"}, + {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0c6fafa0965b539d7aab0a673a046466d23b86e4b0e8019d25fd53f4df62c277"}, + {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e82d4566fcd527eae8b244fa952d99f2ca3172b7e97add0b43e2d97ee77f81ab"}, + {file = "pydantic-1.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:ab523c31e22943713d80d8d342d23b6f6ac4b792a1e54064a8d0cf78fd64e800"}, + {file = "pydantic-1.10.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:666bdf6066bf6dbc107b30d034615d2627e2121506c555f73f90b54a463d1f33"}, + {file = "pydantic-1.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:35db5301b82e8661fa9c505c800d0990bc14e9f36f98932bb1d248c0ac5cada5"}, + {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90c1e29f447557e9e26afb1c4dbf8768a10cc676e3781b6a577841ade126b85"}, + {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93e766b4a8226e0708ef243e843105bf124e21331694367f95f4e3b4a92bbb3f"}, + {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:88f195f582851e8db960b4a94c3e3ad25692c1c1539e2552f3df7a9e972ef60e"}, + {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:34d327c81e68a1ecb52fe9c8d50c8a9b3e90d3c8ad991bfc8f953fb477d42fb4"}, + {file = "pydantic-1.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:d532bf00f381bd6bc62cabc7d1372096b75a33bc197a312b03f5838b4fb84edd"}, + {file = "pydantic-1.10.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7d5b8641c24886d764a74ec541d2fc2c7fb19f6da2a4001e6d580ba4a38f7878"}, + {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b1f6cb446470b7ddf86c2e57cd119a24959af2b01e552f60705910663af09a4"}, + {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c33b60054b2136aef8cf190cd4c52a3daa20b2263917c49adad20eaf381e823b"}, + {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1952526ba40b220b912cdc43c1c32bcf4a58e3f192fa313ee665916b26befb68"}, + {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bb14388ec45a7a0dc429e87def6396f9e73c8c77818c927b6a60706603d5f2ea"}, + {file = "pydantic-1.10.8-cp37-cp37m-win_amd64.whl", hash = "sha256:16f8c3e33af1e9bb16c7a91fc7d5fa9fe27298e9f299cff6cb744d89d573d62c"}, + {file = "pydantic-1.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ced8375969673929809d7f36ad322934c35de4af3b5e5b09ec967c21f9f7887"}, + {file = "pydantic-1.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93e6bcfccbd831894a6a434b0aeb1947f9e70b7468f274154d03d71fabb1d7c6"}, + {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:191ba419b605f897ede9892f6c56fb182f40a15d309ef0142212200a10af4c18"}, + {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:052d8654cb65174d6f9490cc9b9a200083a82cf5c3c5d3985db765757eb3b375"}, + {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ceb6a23bf1ba4b837d0cfe378329ad3f351b5897c8d4914ce95b85fba96da5a1"}, + {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f2e754d5566f050954727c77f094e01793bcb5725b663bf628fa6743a5a9108"}, + {file = "pydantic-1.10.8-cp38-cp38-win_amd64.whl", hash = "sha256:6a82d6cda82258efca32b40040228ecf43a548671cb174a1e81477195ed3ed56"}, + {file = "pydantic-1.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e59417ba8a17265e632af99cc5f35ec309de5980c440c255ab1ca3ae96a3e0e"}, + {file = "pydantic-1.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:84d80219c3f8d4cad44575e18404099c76851bc924ce5ab1c4c8bb5e2a2227d0"}, + {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e4148e635994d57d834be1182a44bdb07dd867fa3c2d1b37002000646cc5459"}, + {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12f7b0bf8553e310e530e9f3a2f5734c68699f42218bf3568ef49cd9b0e44df4"}, + {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42aa0c4b5c3025483240a25b09f3c09a189481ddda2ea3a831a9d25f444e03c1"}, + {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17aef11cc1b997f9d574b91909fed40761e13fac438d72b81f902226a69dac01"}, + {file = "pydantic-1.10.8-cp39-cp39-win_amd64.whl", hash = "sha256:66a703d1983c675a6e0fed8953b0971c44dba48a929a2000a493c3772eb61a5a"}, + {file = "pydantic-1.10.8-py3-none-any.whl", hash = "sha256:7456eb22ed9aaa24ff3e7b4757da20d9e5ce2a81018c1b3ebd81a0b88a18f3b2"}, + {file = "pydantic-1.10.8.tar.gz", hash = "sha256:1410275520dfa70effadf4c21811d755e7ef9bb1f1d077a21958153a92c8d9ca"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.10.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-mock-3.10.0.tar.gz", hash = "sha256:fbbdb085ef7c252a326fd8cdcac0aa3b1333d8811f131bdcc701002e1be7ed4f"}, + {file = "pytest_mock-3.10.0-py3-none-any.whl", hash = "sha256:f4c973eeae0282963eb293eb173ce91b091a79c1334455acfac9ddee8a1c784b"}, +] + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dotenv" +version = "1.0.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"}, + {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pyyaml" +version = "6.0" +description = "YAML parser and emitter for Python" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, + {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, + {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] + +[[package]] +name = "requests" +version = "2.30.0" +description = "Python HTTP for Humans." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.30.0-py3-none-any.whl", hash = "sha256:10e94cc4f3121ee6da529d358cdaeaff2f1c409cd377dbc72b825852f2f7e294"}, + {file = "requests-2.30.0.tar.gz", hash = "sha256:239d7d4458afcb28a692cdd298d87542235f4ca8d36d03a15bfc128a6559a2f4"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-oauthlib" +version = "1.3.1" +description = "OAuthlib authentication support for Requests." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, + {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, +] + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + +[[package]] +name = "ruff" +version = "0.0.267" +description = "An extremely fast Python linter, written in Rust." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.0.267-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:4adbbbe314d8fcc539a245065bad89446a3cef2e0c9cf70bf7bb9ed6fe31856d"}, + {file = "ruff-0.0.267-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:67254ae34c38cba109fdc52e4a70887de1f850fb3971e5eeef343db67305d1c1"}, + {file = "ruff-0.0.267-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbe104f21a429b77eb5ac276bd5352fd8c0e1fbb580b4c772f77ee8c76825654"}, + {file = "ruff-0.0.267-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:db33deef2a5e1cf528ca51cc59dd764122a48a19a6c776283b223d147041153f"}, + {file = "ruff-0.0.267-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9adf1307fa9d840d1acaa477eb04f9702032a483214c409fca9dc46f5f157fe3"}, + {file = "ruff-0.0.267-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:0afca3633c8e2b6c0a48ad0061180b641b3b404d68d7e6736aab301c8024c424"}, + {file = "ruff-0.0.267-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2972241065b1c911bce3db808837ed10f4f6f8a8e15520a4242d291083605ab6"}, + {file = "ruff-0.0.267-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f731d81cb939e757b0335b0090f18ca2e9ff8bcc8e6a1cf909245958949b6e11"}, + {file = "ruff-0.0.267-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20c594eb56c19063ef5a57f89340e64c6550e169d6a29408a45130a8c3068adc"}, + {file = "ruff-0.0.267-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:45d61a2b01bdf61581a2ee039503a08aa603dc74a6bbe6fb5d1ce3052f5370e5"}, + {file = "ruff-0.0.267-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2107cec3699ca4d7bd41543dc1d475c97ae3a21ea9212238b5c2088fa8ee7722"}, + {file = "ruff-0.0.267-py3-none-musllinux_1_2_i686.whl", hash = "sha256:786de30723c71fc46b80a173c3313fc0dbe73c96bd9da8dd1212cbc2f84cdfb2"}, + {file = "ruff-0.0.267-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5a898953949e37c109dd242cfcf9841e065319995ebb7cdfd213b446094a942f"}, + {file = "ruff-0.0.267-py3-none-win32.whl", hash = "sha256:d12ab329474c46b96d962e2bdb92e3ad2144981fe41b89c7770f370646c0101f"}, + {file = "ruff-0.0.267-py3-none-win_amd64.whl", hash = "sha256:d09aecc9f5845586ba90911d815f9772c5a6dcf2e34be58c6017ecb124534ac4"}, + {file = "ruff-0.0.267-py3-none-win_arm64.whl", hash = "sha256:7df7eb5f8d791566ba97cc0b144981b9c080a5b861abaf4bb35a26c8a77b83e9"}, + {file = "ruff-0.0.267.tar.gz", hash = "sha256:632cec7bbaf3c06fcf0a72a1dd029b7d8b7f424ba95a574aaa135f5d20a00af7"}, +] + +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.13" +description = "Database Abstraction Library" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7ad24c85f2a1caf0cd1ae8c2fdb668777a51a02246d9039420f94bd7dbfd37ed"}, + {file = "SQLAlchemy-2.0.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db24d2738add6db19d66ca820479d2f8f96d3f5a13c223f27fa28dd2f268a4bd"}, + {file = "SQLAlchemy-2.0.13-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72746ec17a7d9c5acf2c57a6e6190ceba3dad7127cd85bb17f24e90acc0e8e3f"}, + {file = "SQLAlchemy-2.0.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:755f653d693f9b8f4286d987aec0d4279821bf8d179a9de8e8a5c685e77e57d6"}, + {file = "SQLAlchemy-2.0.13-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e0d20f27edfd6f35b388da2bdcd7769e4ffa374fef8994980ced26eb287e033a"}, + {file = "SQLAlchemy-2.0.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:37de4010f53f452e94e5ed6684480432cfe6a7a8914307ef819cd028b05b98d5"}, + {file = "SQLAlchemy-2.0.13-cp310-cp310-win32.whl", hash = "sha256:31f72bb300eed7bfdb373c7c046121d84fa0ae6f383089db9505ff553ac27cef"}, + {file = "SQLAlchemy-2.0.13-cp310-cp310-win_amd64.whl", hash = "sha256:ec2f525273528425ed2f51861b7b88955160cb95dddb17af0914077040aff4a5"}, + {file = "SQLAlchemy-2.0.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2424a84f131901fbb20a99844d47b38b517174c6e964c8efb15ea6bb9ced8c2b"}, + {file = "SQLAlchemy-2.0.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f9832815257969b3ca9bf0501351e4c02c8d60cbd3ec9f9070d5b0f8852900e"}, + {file = "SQLAlchemy-2.0.13-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a30e4db983faa5145e00ef6eaf894a2d503b3221dbf40a595f3011930d3d0bac"}, + {file = "SQLAlchemy-2.0.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f717944aee40e9f48776cf85b523bb376aa2d9255a268d6d643c57ab387e7264"}, + {file = "SQLAlchemy-2.0.13-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9119795d2405eb23bf7e6707e228fe38124df029494c1b3576459aa3202ea432"}, + {file = "SQLAlchemy-2.0.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2ad9688debf1f0ae9c6e0706a4e2d33b1a01281317cee9bd1d7eef8020c5baac"}, + {file = "SQLAlchemy-2.0.13-cp311-cp311-win32.whl", hash = "sha256:c61b89803a87a3b2a394089a7dadb79a6c64c89f2e8930cc187fec43b319f8d2"}, + {file = "SQLAlchemy-2.0.13-cp311-cp311-win_amd64.whl", hash = "sha256:0aa2cbde85a6eab9263ab480f19e8882d022d30ebcdc14d69e6a8d7c07b0a871"}, + {file = "SQLAlchemy-2.0.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9ad883ac4f5225999747f0849643c4d0ec809d9ffe0ddc81a81dd3e68d0af463"}, + {file = "SQLAlchemy-2.0.13-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e481e54db8cec1457ee7c05f6d2329e3298a304a70d3b5e2e82e77170850b385"}, + {file = "SQLAlchemy-2.0.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4e08e3831671008888bad5d160d757ef35ce34dbb73b78c3998d16aa1334c97"}, + {file = "SQLAlchemy-2.0.13-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f234ba3bb339ad17803009c8251f5ee65dcf283a380817fe486823b08b26383d"}, + {file = "SQLAlchemy-2.0.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:375b7ba88f261dbd79d044f20cbcd919d88befb63f26af9d084614f10cdf97a6"}, + {file = "SQLAlchemy-2.0.13-cp37-cp37m-win32.whl", hash = "sha256:9136d596111c742d061c0f99bab95c5370016c4101a32e72c2b634ad5e0757e6"}, + {file = "SQLAlchemy-2.0.13-cp37-cp37m-win_amd64.whl", hash = "sha256:7612a7366a0855a04430363fb4ab392dc6818aaece0b2e325ff30ee77af9b21f"}, + {file = "SQLAlchemy-2.0.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:49c138856035cb97f0053e5e57ba90ec936b28a0b8b0020d44965c7b0c0bf03a"}, + {file = "SQLAlchemy-2.0.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a5e9e78332a5d841422b88b8c490dfd7f761e64b3430249b66c05d02f72ceab0"}, + {file = "SQLAlchemy-2.0.13-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd0febae872a4042da44e972c070f0fd49a85a0a7727ab6b85425f74348be14e"}, + {file = "SQLAlchemy-2.0.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:566a0ac347cf4632f551e7b28bbd0d215af82e6ffaa2556f565a3b6b51dc3f81"}, + {file = "SQLAlchemy-2.0.13-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e5e5dc300a0ca8755ada1569f5caccfcdca28607dfb98b86a54996b288a8ebd3"}, + {file = "SQLAlchemy-2.0.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a25b4c4fdd633501233924f873e6f6cd8970732859ecfe4ecfb60635881f70be"}, + {file = "SQLAlchemy-2.0.13-cp38-cp38-win32.whl", hash = "sha256:6777673d346071451bf7cccf8d0499024f1bd6a835fc90b4fe7af50373d92ce6"}, + {file = "SQLAlchemy-2.0.13-cp38-cp38-win_amd64.whl", hash = "sha256:2f0a355264af0952570f18457102984e1f79510f856e5e0ae652e63316d1ca23"}, + {file = "SQLAlchemy-2.0.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d93ebbff3dcf05274843ad8cf650b48ee634626e752c5d73614e5ec9df45f0ce"}, + {file = "SQLAlchemy-2.0.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fec56c7d1b6a22c8f01557de3975d962ee40270b81b60d1cfdadf2a105d10e84"}, + {file = "SQLAlchemy-2.0.13-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eb14a386a5b610305bec6639b35540b47f408b0a59f75999199aed5b3d40079"}, + {file = "SQLAlchemy-2.0.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2f3b5236079bc3e318a92bab2cc3f669cc32127075ab03ff61cacbae1c392b8"}, + {file = "SQLAlchemy-2.0.13-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bf1aae95e80acea02a0a622e1c12d3fefc52ffd0fe7bda70a30d070373fbb6c3"}, + {file = "SQLAlchemy-2.0.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cdf80359b641185ae7e580afb9f88cf560298f309a38182972091165bfe1225d"}, + {file = "SQLAlchemy-2.0.13-cp39-cp39-win32.whl", hash = "sha256:f463598f9e51ccc04f0fe08500f9a0c3251a7086765350be418598b753b5561d"}, + {file = "SQLAlchemy-2.0.13-cp39-cp39-win_amd64.whl", hash = "sha256:881cc388dded44ae6e17a1666364b98bd76bcdc71b869014ae725f06ba298e0e"}, + {file = "SQLAlchemy-2.0.13-py3-none-any.whl", hash = "sha256:0d6979c9707f8b82366ba34b38b5a6fe32f75766b2e901f9820e271e95384070"}, + {file = "SQLAlchemy-2.0.13.tar.gz", hash = "sha256:8d97b37b4e60073c38bcf94e289e3be09ef9be870de88d163f16e08f2b9ded1a"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +typing-extensions = ">=4.2.0" + +[package.extras] +aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx-oracle (>=7)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3-binary"] + +[[package]] +name = "starlette" +version = "0.27.0" +description = "The little ASGI library that shines." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "starlette-0.27.0-py3-none-any.whl", hash = "sha256:918416370e846586541235ccd38a474c08b80443ed31c578a418e2209b3eef91"}, + {file = "starlette-0.27.0.tar.gz", hash = "sha256:6a6b0d042acb8d469a01eba54e9cda6cbd24ac602c4cd016723117d6a7e73b75"}, +] + +[package.dependencies] +anyio = ">=3.4.0,<5" + +[package.extras] +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "types-requests" +version = "2.30.0.0" +description = "Typing stubs for requests" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "types-requests-2.30.0.0.tar.gz", hash = "sha256:dec781054324a70ba64430ae9e62e7e9c8e4618c185a5cb3f87a6738251b5a31"}, + {file = "types_requests-2.30.0.0-py3-none-any.whl", hash = "sha256:c6cf08e120ca9f0dc4fa4e32c3f953c3fba222bcc1db6b97695bce8da1ba9864"}, +] + +[package.dependencies] +types-urllib3 = "*" + +[[package]] +name = "types-urllib3" +version = "1.26.25.13" +description = "Typing stubs for urllib3" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "types-urllib3-1.26.25.13.tar.gz", hash = "sha256:3300538c9dc11dad32eae4827ac313f5d986b8b21494801f1bf97a1ac6c03ae5"}, + {file = "types_urllib3-1.26.25.13-py3-none-any.whl", hash = "sha256:5dbd1d2bef14efee43f5318b5d36d805a489f6600252bb53626d4bfafd95e27c"}, +] + +[[package]] +name = "typing-extensions" +version = "4.5.0" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, + {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, +] + +[[package]] +name = "typing-inspect" +version = "0.8.0" +description = "Runtime inspection utilities for typing module." +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "typing_inspect-0.8.0-py3-none-any.whl", hash = "sha256:5fbf9c1e65d4fa01e701fe12a5bca6c6e08a4ffd5bc60bfac028253a447c5188"}, + {file = "typing_inspect-0.8.0.tar.gz", hash = "sha256:8b1ff0c400943b6145df8119c41c244ca8207f1f10c9c057aeed1560e4806e3d"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "urllib3" +version = "1.26.15" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, + {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "uvicorn" +version = "0.22.0" +description = "The lightning-fast ASGI server." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "uvicorn-0.22.0-py3-none-any.whl", hash = "sha256:e9434d3bbf05f310e762147f769c9f21235ee118ba2d2bf1155a7196448bd996"}, + {file = "uvicorn-0.22.0.tar.gz", hash = "sha256:79277ae03db57ce7d9aa0567830bbb51d7a612f54d6e1e3e92da3ef24c2c8ed8"}, +] + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""} +h11 = ">=0.8" +httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} +python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} + +[package.extras] +standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "uvloop" +version = "0.17.0" +description = "Fast implementation of asyncio event loop on top of libuv" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "uvloop-0.17.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ce9f61938d7155f79d3cb2ffa663147d4a76d16e08f65e2c66b77bd41b356718"}, + {file = "uvloop-0.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:68532f4349fd3900b839f588972b3392ee56042e440dd5873dfbbcd2cc67617c"}, + {file = "uvloop-0.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0949caf774b9fcefc7c5756bacbbbd3fc4c05a6b7eebc7c7ad6f825b23998d6d"}, + {file = "uvloop-0.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff3d00b70ce95adce264462c930fbaecb29718ba6563db354608f37e49e09024"}, + {file = "uvloop-0.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a5abddb3558d3f0a78949c750644a67be31e47936042d4f6c888dd6f3c95f4aa"}, + {file = "uvloop-0.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8efcadc5a0003d3a6e887ccc1fb44dec25594f117a94e3127954c05cf144d811"}, + {file = "uvloop-0.17.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3378eb62c63bf336ae2070599e49089005771cc651c8769aaad72d1bd9385a7c"}, + {file = "uvloop-0.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6aafa5a78b9e62493539456f8b646f85abc7093dd997f4976bb105537cf2635e"}, + {file = "uvloop-0.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c686a47d57ca910a2572fddfe9912819880b8765e2f01dc0dd12a9bf8573e539"}, + {file = "uvloop-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:864e1197139d651a76c81757db5eb199db8866e13acb0dfe96e6fc5d1cf45fc4"}, + {file = "uvloop-0.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2a6149e1defac0faf505406259561bc14b034cdf1d4711a3ddcdfbaa8d825a05"}, + {file = "uvloop-0.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6708f30db9117f115eadc4f125c2a10c1a50d711461699a0cbfaa45b9a78e376"}, + {file = "uvloop-0.17.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:23609ca361a7fc587031429fa25ad2ed7242941adec948f9d10c045bfecab06b"}, + {file = "uvloop-0.17.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2deae0b0fb00a6af41fe60a675cec079615b01d68beb4cc7b722424406b126a8"}, + {file = "uvloop-0.17.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45cea33b208971e87a31c17622e4b440cac231766ec11e5d22c76fab3bf9df62"}, + {file = "uvloop-0.17.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9b09e0f0ac29eee0451d71798878eae5a4e6a91aa275e114037b27f7db72702d"}, + {file = "uvloop-0.17.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dbbaf9da2ee98ee2531e0c780455f2841e4675ff580ecf93fe5c48fe733b5667"}, + {file = "uvloop-0.17.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a4aee22ece20958888eedbad20e4dbb03c37533e010fb824161b4f05e641f738"}, + {file = "uvloop-0.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:307958f9fc5c8bb01fad752d1345168c0abc5d62c1b72a4a8c6c06f042b45b20"}, + {file = "uvloop-0.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ebeeec6a6641d0adb2ea71dcfb76017602ee2bfd8213e3fcc18d8f699c5104f"}, + {file = "uvloop-0.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1436c8673c1563422213ac6907789ecb2b070f5939b9cbff9ef7113f2b531595"}, + {file = "uvloop-0.17.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8887d675a64cfc59f4ecd34382e5b4f0ef4ae1da37ed665adba0c2badf0d6578"}, + {file = "uvloop-0.17.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3db8de10ed684995a7f34a001f15b374c230f7655ae840964d51496e2f8a8474"}, + {file = "uvloop-0.17.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7d37dccc7ae63e61f7b96ee2e19c40f153ba6ce730d8ba4d3b4e9738c1dccc1b"}, + {file = "uvloop-0.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cbbe908fda687e39afd6ea2a2f14c2c3e43f2ca88e3a11964b297822358d0e6c"}, + {file = "uvloop-0.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d97672dc709fa4447ab83276f344a165075fd9f366a97b712bdd3fee05efae8"}, + {file = "uvloop-0.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1e507c9ee39c61bfddd79714e4f85900656db1aec4d40c6de55648e85c2799c"}, + {file = "uvloop-0.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c092a2c1e736086d59ac8e41f9c98f26bbf9b9222a76f21af9dfe949b99b2eb9"}, + {file = "uvloop-0.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:30babd84706115626ea78ea5dbc7dd8d0d01a2e9f9b306d24ca4ed5796c66ded"}, + {file = "uvloop-0.17.0.tar.gz", hash = "sha256:0ddf6baf9cf11a1a22c71487f39f15b2cf78eb5bde7e5b45fbb99e8a9d91b9e1"}, +] + +[package.extras] +dev = ["Cython (>=0.29.32,<0.30.0)", "Sphinx (>=4.1.2,<4.2.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=22.0.0,<22.1.0)", "pycodestyle (>=2.7.0,<2.8.0)", "pytest (>=3.6.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["Cython (>=0.29.32,<0.30.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=22.0.0,<22.1.0)", "pycodestyle (>=2.7.0,<2.8.0)"] + +[[package]] +name = "watchfiles" +version = "0.19.0" +description = "Simple, modern and high performance file watching and code reload in python." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "watchfiles-0.19.0-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:91633e64712df3051ca454ca7d1b976baf842d7a3640b87622b323c55f3345e7"}, + {file = "watchfiles-0.19.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:b6577b8c6c8701ba8642ea9335a129836347894b666dd1ec2226830e263909d3"}, + {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:18b28f6ad871b82df9542ff958d0c86bb0d8310bb09eb8e87d97318a3b5273af"}, + {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fac19dc9cbc34052394dbe81e149411a62e71999c0a19e1e09ce537867f95ae0"}, + {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:09ea3397aecbc81c19ed7f025e051a7387feefdb789cf768ff994c1228182fda"}, + {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0376deac92377817e4fb8f347bf559b7d44ff556d9bc6f6208dd3f79f104aaf"}, + {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c75eff897786ee262c9f17a48886f4e98e6cfd335e011c591c305e5d083c056"}, + {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb5d45c4143c1dd60f98a16187fd123eda7248f84ef22244818c18d531a249d1"}, + {file = "watchfiles-0.19.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:79c533ff593db861ae23436541f481ec896ee3da4e5db8962429b441bbaae16e"}, + {file = "watchfiles-0.19.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3d7d267d27aceeeaa3de0dd161a0d64f0a282264d592e335fff7958cc0cbae7c"}, + {file = "watchfiles-0.19.0-cp37-abi3-win32.whl", hash = "sha256:176a9a7641ec2c97b24455135d58012a5be5c6217fc4d5fef0b2b9f75dbf5154"}, + {file = "watchfiles-0.19.0-cp37-abi3-win_amd64.whl", hash = "sha256:945be0baa3e2440151eb3718fd8846751e8b51d8de7b884c90b17d271d34cae8"}, + {file = "watchfiles-0.19.0-cp37-abi3-win_arm64.whl", hash = "sha256:0089c6dc24d436b373c3c57657bf4f9a453b13767150d17284fc6162b2791911"}, + {file = "watchfiles-0.19.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cae3dde0b4b2078f31527acff6f486e23abed307ba4d3932466ba7cdd5ecec79"}, + {file = "watchfiles-0.19.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f3920b1285a7d3ce898e303d84791b7bf40d57b7695ad549dc04e6a44c9f120"}, + {file = "watchfiles-0.19.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9afd0d69429172c796164fd7fe8e821ade9be983f51c659a38da3faaaaac44dc"}, + {file = "watchfiles-0.19.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68dce92b29575dda0f8d30c11742a8e2b9b8ec768ae414b54f7453f27bdf9545"}, + {file = "watchfiles-0.19.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:5569fc7f967429d4bc87e355cdfdcee6aabe4b620801e2cf5805ea245c06097c"}, + {file = "watchfiles-0.19.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5471582658ea56fca122c0f0d0116a36807c63fefd6fdc92c71ca9a4491b6b48"}, + {file = "watchfiles-0.19.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b538014a87f94d92f98f34d3e6d2635478e6be6423a9ea53e4dd96210065e193"}, + {file = "watchfiles-0.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20b44221764955b1e703f012c74015306fb7e79a00c15370785f309b1ed9aa8d"}, + {file = "watchfiles-0.19.0.tar.gz", hash = "sha256:d9b073073e048081e502b6c6b0b88714c026a1a4c890569238d04aca5f9ca74b"}, +] + +[package.dependencies] +anyio = ">=3.0.0" + +[[package]] +name = "websockets" +version = "11.0.3" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "websockets-11.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3ccc8a0c387629aec40f2fc9fdcb4b9d5431954f934da3eaf16cdc94f67dbfac"}, + {file = "websockets-11.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d67ac60a307f760c6e65dad586f556dde58e683fab03323221a4e530ead6f74d"}, + {file = "websockets-11.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:84d27a4832cc1a0ee07cdcf2b0629a8a72db73f4cf6de6f0904f6661227f256f"}, + {file = "websockets-11.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffd7dcaf744f25f82190856bc26ed81721508fc5cbf2a330751e135ff1283564"}, + {file = "websockets-11.0.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7622a89d696fc87af8e8d280d9b421db5133ef5b29d3f7a1ce9f1a7bf7fcfa11"}, + {file = "websockets-11.0.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bceab846bac555aff6427d060f2fcfff71042dba6f5fca7dc4f75cac815e57ca"}, + {file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:54c6e5b3d3a8936a4ab6870d46bdd6ec500ad62bde9e44462c32d18f1e9a8e54"}, + {file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:41f696ba95cd92dc047e46b41b26dd24518384749ed0d99bea0a941ca87404c4"}, + {file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:86d2a77fd490ae3ff6fae1c6ceaecad063d3cc2320b44377efdde79880e11526"}, + {file = "websockets-11.0.3-cp310-cp310-win32.whl", hash = "sha256:2d903ad4419f5b472de90cd2d40384573b25da71e33519a67797de17ef849b69"}, + {file = "websockets-11.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:1d2256283fa4b7f4c7d7d3e84dc2ece74d341bce57d5b9bf385df109c2a1a82f"}, + {file = "websockets-11.0.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e848f46a58b9fcf3d06061d17be388caf70ea5b8cc3466251963c8345e13f7eb"}, + {file = "websockets-11.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa5003845cdd21ac0dc6c9bf661c5beddd01116f6eb9eb3c8e272353d45b3288"}, + {file = "websockets-11.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b58cbf0697721120866820b89f93659abc31c1e876bf20d0b3d03cef14faf84d"}, + {file = "websockets-11.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:660e2d9068d2bedc0912af508f30bbeb505bbbf9774d98def45f68278cea20d3"}, + {file = "websockets-11.0.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c1f0524f203e3bd35149f12157438f406eff2e4fb30f71221c8a5eceb3617b6b"}, + {file = "websockets-11.0.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:def07915168ac8f7853812cc593c71185a16216e9e4fa886358a17ed0fd9fcf6"}, + {file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b30c6590146e53149f04e85a6e4fcae068df4289e31e4aee1fdf56a0dead8f97"}, + {file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:619d9f06372b3a42bc29d0cd0354c9bb9fb39c2cbc1a9c5025b4538738dbffaf"}, + {file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:01f5567d9cf6f502d655151645d4e8b72b453413d3819d2b6f1185abc23e82dd"}, + {file = "websockets-11.0.3-cp311-cp311-win32.whl", hash = "sha256:e1459677e5d12be8bbc7584c35b992eea142911a6236a3278b9b5ce3326f282c"}, + {file = "websockets-11.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:e7837cb169eca3b3ae94cc5787c4fed99eef74c0ab9506756eea335e0d6f3ed8"}, + {file = "websockets-11.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9f59a3c656fef341a99e3d63189852be7084c0e54b75734cde571182c087b152"}, + {file = "websockets-11.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2529338a6ff0eb0b50c7be33dc3d0e456381157a31eefc561771ee431134a97f"}, + {file = "websockets-11.0.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34fd59a4ac42dff6d4681d8843217137f6bc85ed29722f2f7222bd619d15e95b"}, + {file = "websockets-11.0.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:332d126167ddddec94597c2365537baf9ff62dfcc9db4266f263d455f2f031cb"}, + {file = "websockets-11.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6505c1b31274723ccaf5f515c1824a4ad2f0d191cec942666b3d0f3aa4cb4007"}, + {file = "websockets-11.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f467ba0050b7de85016b43f5a22b46383ef004c4f672148a8abf32bc999a87f0"}, + {file = "websockets-11.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9d9acd80072abcc98bd2c86c3c9cd4ac2347b5a5a0cae7ed5c0ee5675f86d9af"}, + {file = "websockets-11.0.3-cp37-cp37m-win32.whl", hash = "sha256:e590228200fcfc7e9109509e4d9125eace2042fd52b595dd22bbc34bb282307f"}, + {file = "websockets-11.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b16fff62b45eccb9c7abb18e60e7e446998093cdcb50fed33134b9b6878836de"}, + {file = "websockets-11.0.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fb06eea71a00a7af0ae6aefbb932fb8a7df3cb390cc217d51a9ad7343de1b8d0"}, + {file = "websockets-11.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8a34e13a62a59c871064dfd8ffb150867e54291e46d4a7cf11d02c94a5275bae"}, + {file = "websockets-11.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4841ed00f1026dfbced6fca7d963c4e7043aa832648671b5138008dc5a8f6d99"}, + {file = "websockets-11.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a073fc9ab1c8aff37c99f11f1641e16da517770e31a37265d2755282a5d28aa"}, + {file = "websockets-11.0.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68b977f21ce443d6d378dbd5ca38621755f2063d6fdb3335bda981d552cfff86"}, + {file = "websockets-11.0.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1a99a7a71631f0efe727c10edfba09ea6bee4166a6f9c19aafb6c0b5917d09c"}, + {file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bee9fcb41db2a23bed96c6b6ead6489702c12334ea20a297aa095ce6d31370d0"}, + {file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4b253869ea05a5a073ebfdcb5cb3b0266a57c3764cf6fe114e4cd90f4bfa5f5e"}, + {file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1553cb82942b2a74dd9b15a018dce645d4e68674de2ca31ff13ebc2d9f283788"}, + {file = "websockets-11.0.3-cp38-cp38-win32.whl", hash = "sha256:f61bdb1df43dc9c131791fbc2355535f9024b9a04398d3bd0684fc16ab07df74"}, + {file = "websockets-11.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:03aae4edc0b1c68498f41a6772d80ac7c1e33c06c6ffa2ac1c27a07653e79d6f"}, + {file = "websockets-11.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:777354ee16f02f643a4c7f2b3eff8027a33c9861edc691a2003531f5da4f6bc8"}, + {file = "websockets-11.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8c82f11964f010053e13daafdc7154ce7385ecc538989a354ccc7067fd7028fd"}, + {file = "websockets-11.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3580dd9c1ad0701169e4d6fc41e878ffe05e6bdcaf3c412f9d559389d0c9e016"}, + {file = "websockets-11.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f1a3f10f836fab6ca6efa97bb952300b20ae56b409414ca85bff2ad241d2a61"}, + {file = "websockets-11.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df41b9bc27c2c25b486bae7cf42fccdc52ff181c8c387bfd026624a491c2671b"}, + {file = "websockets-11.0.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:279e5de4671e79a9ac877427f4ac4ce93751b8823f276b681d04b2156713b9dd"}, + {file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1fdf26fa8a6a592f8f9235285b8affa72748dc12e964a5518c6c5e8f916716f7"}, + {file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:69269f3a0b472e91125b503d3c0b3566bda26da0a3261c49f0027eb6075086d1"}, + {file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:97b52894d948d2f6ea480171a27122d77af14ced35f62e5c892ca2fae9344311"}, + {file = "websockets-11.0.3-cp39-cp39-win32.whl", hash = "sha256:c7f3cb904cce8e1be667c7e6fef4516b98d1a6a0635a58a57528d577ac18a128"}, + {file = "websockets-11.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c792ea4eabc0159535608fc5658a74d1a81020eb35195dd63214dcf07556f67e"}, + {file = "websockets-11.0.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f2e58f2c36cc52d41f2659e4c0cbf7353e28c8c9e63e30d8c6d3494dc9fdedcf"}, + {file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de36fe9c02995c7e6ae6efe2e205816f5f00c22fd1fbf343d4d18c3d5ceac2f5"}, + {file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0ac56b661e60edd453585f4bd68eb6a29ae25b5184fd5ba51e97652580458998"}, + {file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e052b8467dd07d4943936009f46ae5ce7b908ddcac3fda581656b1b19c083d9b"}, + {file = "websockets-11.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:42cc5452a54a8e46a032521d7365da775823e21bfba2895fb7b77633cce031bb"}, + {file = "websockets-11.0.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e6316827e3e79b7b8e7d8e3b08f4e331af91a48e794d5d8b099928b6f0b85f20"}, + {file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8531fdcad636d82c517b26a448dcfe62f720e1922b33c81ce695d0edb91eb931"}, + {file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c114e8da9b475739dde229fd3bc6b05a6537a88a578358bc8eb29b4030fac9c9"}, + {file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e063b1865974611313a3849d43f2c3f5368093691349cf3c7c8f8f75ad7cb280"}, + {file = "websockets-11.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:92b2065d642bf8c0a82d59e59053dd2fdde64d4ed44efe4870fa816c1232647b"}, + {file = "websockets-11.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0ee68fe502f9031f19d495dae2c268830df2760c0524cbac5d759921ba8c8e82"}, + {file = "websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcacf2c7a6c3a84e720d1bb2b543c675bf6c40e460300b628bab1b1efc7c034c"}, + {file = "websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b67c6f5e5a401fc56394f191f00f9b3811fe843ee93f4a70df3c389d1adf857d"}, + {file = "websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d5023a4b6a5b183dc838808087033ec5df77580485fc533e7dab2567851b0a4"}, + {file = "websockets-11.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ed058398f55163a79bb9f06a90ef9ccc063b204bb346c4de78efc5d15abfe602"}, + {file = "websockets-11.0.3-py3-none-any.whl", hash = "sha256:6681ba9e7f8f3b19440921e99efbb40fc89f26cd71bf539e45d8c8a25c976dc6"}, + {file = "websockets-11.0.3.tar.gz", hash = "sha256:88fc51d9a26b10fc331be344f1781224a375b78488fc343620184e95a4b27016"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.10" +content-hash = "2527044d5148da7759c10ba006c4eff6e46abab8188d31a5d02b085d96f3119e" diff --git a/api/pyproject.toml b/api/pyproject.toml new file mode 100644 index 00000000..3b677863 --- /dev/null +++ b/api/pyproject.toml @@ -0,0 +1,34 @@ +[tool.poetry] +name = "api" +version = "0.0.1" +description = "oss website api" +authors = ["OssCameroon "] +license = "MIT" +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.11" +# prod deps +Fastapi = ">=0.95.2" +uvicorn = { extras = ["standard"], version = "<1" } +sqlalchemy = ">=2.0.13" +asyncpg = ">=0.27.0" + +[tool.poetry.group.dev.dependencies] +# test deps +pytest = ">=6.2.5" +requests = ">=2.26.0" +requests-oauthlib = ">=1.3.0" +pytest-mock = ">=3.10.0" +ruff = ">=0.0.267" +isort = ">=5.12.0" +mypy = ">=1.3.0" +libcst = ">=0.4.9" +# typing stub libs +types-requests = ">=2.30.0.0" +mypy-extensions = ">=1.0.0" + + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/api/requirements.txt b/api/requirements.txt deleted file mode 100644 index e553b594..00000000 --- a/api/requirements.txt +++ /dev/null @@ -1,22 +0,0 @@ -# prod deps -Flask==1.1.4 -Flask-Cors==3.0.10 -Flask-Script==2.0.6 -Flask-Restplus==0.13.0 -google-api-core==2.3.0 -google-auth==2.3.3 -google-cloud-core==2.2.1 -google-cloud-datastore==2.4.0 -googleapis-common-protos==1.54.0 -protobuf==3.19.1 -MarkupSafe==2.0.1 -requests==2.26.0 -requests-oauthlib==1.3.0 -configparser==5.2.0 -meilisearch==0.17.0 -Werkzeug==0.16.1 - -# dev deps -flake8==4.0.1 -pyflakes==2.4.0 -pytest==6.2.5 diff --git a/api/tests/app/.cmd_history b/api/tests/app/.cmd_history new file mode 100644 index 00000000..1cba5956 --- /dev/null +++ b/api/tests/app/.cmd_history @@ -0,0 +1 @@ +cd .. diff --git a/api/tests/app/test_settings.py b/api/tests/app/test_settings.py new file mode 100644 index 00000000..dd6e8e03 --- /dev/null +++ b/api/tests/app/test_settings.py @@ -0,0 +1,23 @@ +import os +from unittest.mock import patch + +from app.settings import get_conf + + +def test_get_conf_existing_key(): + key, value = 'existing_key', 'existing_value' + + with patch.dict(os.environ, {key: value}): + assert get_conf(key) == value + +def test_get_conf_missing_key(): + key, fallback = 'missing_key', 'default_value' + + with patch.dict(os.environ, {}): + assert get_conf(key, fallback) == fallback + +def test_get_conf_missing_key_no_fallback(): + key = 'missing_key' + + with patch.dict(os.environ, {}): + assert get_conf(key) == "" diff --git a/api/tests/main/test_github_controller.py b/api/tests/main/test_github_controller.py new file mode 100644 index 00000000..2dff0bea --- /dev/null +++ b/api/tests/main/test_github_controller.py @@ -0,0 +1,117 @@ +import json + +import pytest +from flask import Flask + +from app.main import create_app + + +@pytest.fixture +def app() -> Flask: + return create_app( + 'dev' + ) + +def test_get_all_users(app: Flask) -> None: + client = app.test_client() + + response = client.get('/users') + + assert response.status_code == 200 + data = json.loads(response.data) + assert isinstance(data, list) + + +def test_get_user(app: Flask) -> None: + client = app.test_client() + + response = client.get('/users/elhmne') + + assert response.status_code == 200 + data = json.loads(response.data) + assert isinstance(data, dict) + + +def test_search_users(app: Flask) -> None: + client = app.test_client() + + response = client.get('/users/search?query=test') + + assert response.status_code == 200 + data = json.loads(response.data) + assert isinstance(data, list) + + +def test_post_search_users(app: Flask) -> None: + client = app.test_client() + + data = { + "query": "test", + "page": 1, + "count": 20, + "sort_type": "most_recent" + } + + response = client.post('/users/search', json=data) + + assert response.status_code == 200 + data = json.loads(response.data) + assert isinstance(data, list) + + +def test_get_project(app: Flask) -> None: + client = app.test_client() + + response = client.get('/projects/node-openerp') + + assert response.status_code == 200 + data = json.loads(response.data) + assert isinstance(data, dict) + + +def test_get_all_projects(app: Flask) -> None: + client = app.test_client() + + response = client.get('/projects') + + assert response.status_code == 200 + data = json.loads(response.data) + assert isinstance(data, list) + + +def test_search_projects(app: Flask) -> None: + client = app.test_client() + + response = client.get('/projects/search?query=test') + + assert response.status_code == 200 + data = json.loads(response.data) + assert isinstance(data, list) + + +def test_post_search_projects(app: Flask) -> None: + client = app.test_client() + + data = { + "query": "test", + "page": 1, + "count": 20, + "languages": ["python", "java"], + "sort_type": "most_recent" + } + + response = client.post('/projects/search', json=data) + + assert response.status_code == 200 + data = json.loads(response.data) + assert isinstance(data, list) + + +def test_get_languages(app: Flask) -> None: + client = app.test_client() + + response = client.get('/languages') + + assert response.status_code == 200 + data = json.loads(response.data) + assert isinstance(data, list) diff --git a/api/tests/main/test_twitter_controller.py b/api/tests/main/test_twitter_controller.py new file mode 100644 index 00000000..6409a9d6 --- /dev/null +++ b/api/tests/main/test_twitter_controller.py @@ -0,0 +1,73 @@ +import json + +import pytest +from flask import Flask + +from app.main import create_app + + +@pytest.fixture +def app() -> Flask: + return create_app( + 'dev' + ) + +def test_get_top_tweets(app: Flask) -> None: + client = app.test_client() + + response = client.get('/top-tweets') + + assert response.status_code == 200 + data = json.loads(response.data) + assert isinstance(data, dict) + assert "top_tweets" in data + assert isinstance(data["top_tweets"], list) + + +def test_get_top_tweets_with_custom_count(app: Flask) -> None: + client = app.test_client() + + response = client.get('/top-tweets?count=10') + + assert response.status_code == 200 + data = json.loads(response.data) + assert isinstance(data, dict) + assert "top_tweets" in data + assert isinstance(data["top_tweets"], list) + assert len(data["top_tweets"]) == 10 + + +def test_get_top_tweets_with_invalid_count(app: Flask) -> None: + client = app.test_client() + + response = client.get('/top-tweets?count=abc') + + assert response.status_code == 200 + data = json.loads(response.data) + assert isinstance(data, dict) + assert "error" in data + assert data["error"] == "Invalid count parameter" + + +def test_get_top_tweets_with_negative_count(app: Flask) -> None: + client = app.test_client() + + response = client.get('/top-tweets?count=-5') + + assert response.status_code == 200 + data = json.loads(response.data) + assert isinstance(data, dict) + assert "error" in data + assert data["error"] == "Invalid count parameter" + + +def test_get_top_tweets_with_zero_count(app: Flask) -> None: + client = app.test_client() + + response = client.get('/top-tweets?count=0') + + assert response.status_code == 200 + data = json.loads(response.data) + assert isinstance(data, dict) + assert "error" in data + assert data["error"] == "Invalid count parameter" diff --git a/api/tests/test_assert.py b/api/tests/test_assert.py index 26b1bf15..2044bcfa 100644 --- a/api/tests/test_assert.py +++ b/api/tests/test_assert.py @@ -1,3 +1,3 @@ # a basic test def test_assert(): - assert True == True + assert True is True diff --git a/api/wsgi.py b/api/wsgi.py deleted file mode 100644 index a49974de..00000000 --- a/api/wsgi.py +++ /dev/null @@ -1,4 +0,0 @@ -from manage import app - -if __name__ == "__main__": - app.run() diff --git a/docker-compose.yml b/docker-compose.yml index 9317f3b7..f4566470 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,66 +1,53 @@ -version: '3' +version: '3.9' services: - #find a way to tear this down after the init scripts has worked - setup-datastore: - build: ./scripts/setup-datastore/ - command: ["/wait-for-it/wait-for-it.sh", "datastore:8000", "--", "make", "run-with-emulator"] - depends_on: - - datastore - environment: - MEILISEARCH_HOST: http://meilisearch:7700 - GOOGLE_APPLICATION_CREDENTIALS: /tmp/key/creds.json - GCLOUD_PROJECT: pname - DATASTORE_EMULATOR_HOST: datastore:8000 - DATASTORE_PROJECT_ID: pname - volumes: - - ./scripts/wait-for-it/:/wait-for-it - #find a way to tear this down after the init scripts has worked - setup-meilisearch: - build: ./tools/meilisearch-index-generator/ - command: ["/wait-for-it/wait-for-it.sh", "meilisearch:7700", "--", "make", "run-with-emulator"] - depends_on: - - setup-datastore - - meilisearch + api: + build: ./api environment: - MEILISEARCH_HOST: http://meilisearch:7700 - GOOGLE_APPLICATION_CREDENTIALS: /tmp/key/creds.json - GCLOUD_PROJECT: pname - DATASTORE_EMULATOR_HOST: datastore:8000 - DATASTORE_PROJECT_ID: pname - volumes: - - ./scripts/wait-for-it/:/wait-for-it - - meilisearch: - image: getmeili/meilisearch - volumes: - - ./data.ms:/data.ms + PGDATABASE: ossdb + PGHOST: localhost + PGPASSWORD: pwd + PGPORT: 5432 + PGUSER: user ports: - - 7700:7700 + - 8811:8811 - datastore: - build: - context: ./gcloud-emulator/ - dockerfile: Dockerfile-datastore - command: gcloud beta emulators datastore start --project=pname --host-port 0.0.0.0:8000 --no-store-on-disk + db: + image: postgres + container_name: postgres_container + restart: always ports: - - 8000:8000 + - 5432:5432 + command: ["postgres", "-c", "log_statement=all"] + environment: + OSS_WEBSITE_APP_USER: ${OSS_WEBSITE_APP_USER} + OSS_WEBSITE_APP_PASSWORD: ${OSS_WEBSITE_APP_PASSWORD} + OSS_WEBSITE_APP_DATABASE: ${OSS_WEBSITE_APP_DATABASE} + POSTGRES_PASSWORD: "admin" # postgres admin password + volumes: + - ./scripts/initdb:/docker-entrypoint-initdb.d + healthcheck: + test: ["CMD", "psql", "postgresql://${OSS_WEBSITE_APP_USER}:${OSS_WEBSITE_APP_PASSWORD}@localhost/${OSS_WEBSITE_APP_DATABASE}"] + interval: 0.5s + timeout: 1s + retries: 4 + start_period: 1s - api: - build: ./api + pgadmin: + image: dpage/pgadmin4 + container_name: pgadmin4_container + restart: always + ports: + - "8080:80" depends_on: - - setup-meilisearch - - setup-datastore + db: + condition: service_healthy environment: - MEILISEARCH_HOST: http://meilisearch:7700 - GOOGLE_APPLICATION_CREDENTIALS: /tmp/key/creds.json - GCLOUD_PROJECT: pname - DATASTORE_EMULATOR_HOST: datastore:8000 - DATASTORE_PROJECT_ID: pname - ports: - - 8811:8811 - + PGADMIN_DEFAULT_EMAIL: admin@admin.com + PGADMIN_DEFAULT_PASSWORD: admin + volumes: + - pgadmin-data:/var/lib/pgadmin frontend: build: ./frontend diff --git a/gcloud-emulator/Dockerfile-datastore b/gcloud-emulator/Dockerfile-datastore deleted file mode 100644 index cccf14a0..00000000 --- a/gcloud-emulator/Dockerfile-datastore +++ /dev/null @@ -1,26 +0,0 @@ -# debian:buster-slim is used instead of alpine because the cloud bigtable emulator requires glibc. -FROM debian:buster-slim - -ARG CLOUD_SDK_VERSION=334.0.0 -ENV CLOUD_SDK_VERSION=$CLOUD_SDK_VERSION -ENV PATH /google-cloud-sdk/bin:$PATH - -RUN mkdir -p /usr/share/man/man1/ && \ - apt-get update && \ - apt-get -y install \ - curl \ - python3 \ - python3-crcmod \ - bash \ - openjdk-11-jre-headless && \ - curl -O https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/google-cloud-sdk-${CLOUD_SDK_VERSION}-linux-x86_64.tar.gz && \ - tar xzf google-cloud-sdk-${CLOUD_SDK_VERSION}-linux-x86_64.tar.gz && \ - rm google-cloud-sdk-${CLOUD_SDK_VERSION}-linux-x86_64.tar.gz && \ - gcloud config set core/disable_usage_reporting true && \ - gcloud config set component_manager/disable_update_check true && \ - gcloud config set metrics/environment github_docker_image_emulator && \ - gcloud components remove anthoscli && \ - gcloud components install beta cloud-datastore-emulator && \ - rm /google-cloud-sdk/data/cli/gcloud.json && \ - rm -rf /google-cloud-sdk/.install/.backup/ && \ - find /google-cloud-sdk/ -name "__pycache__" -type d | xargs -n 1 rm -rf From 8557fdc16cc9a04642d27b251ff74d71605d068e Mon Sep 17 00:00:00 2001 From: sanix-darker Date: Fri, 11 Aug 2023 21:31:50 +0200 Subject: [PATCH 02/15] cleaning up deps --- api/poetry.lock | 71 ++++------------------------------------------ api/pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 67 deletions(-) diff --git a/api/poetry.lock b/api/poetry.lock index 7a222d38..b7709f02 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "anyio" version = "3.7.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -26,7 +25,6 @@ trio = ["trio (<0.22)"] name = "asyncpg" version = "0.27.0" description = "An asyncio PostgreSQL driver" -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -77,7 +75,6 @@ test = ["flake8 (>=5.0.4,<5.1.0)", "uvloop (>=0.15.3)"] name = "atomicwrites" version = "1.4.1" description = "Atomic file writes." -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -88,7 +85,6 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -107,7 +103,6 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "certifi" version = "2023.5.7" description = "Python package for providing Mozilla's CA Bundle." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -119,7 +114,6 @@ files = [ name = "charset-normalizer" version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -204,7 +198,6 @@ files = [ name = "click" version = "8.1.3" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -219,7 +212,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -227,27 +219,10 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -[[package]] -name = "configparser" -version = "5.3.0" -description = "Updated configparser from stdlib for earlier Pythons." -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "configparser-5.3.0-py3-none-any.whl", hash = "sha256:b065779fd93c6bf4cee42202fa4351b4bb842e96a3fb469440e484517a49b9fa"}, - {file = "configparser-5.3.0.tar.gz", hash = "sha256:8be267824b541c09b08db124917f48ab525a6c3e837011f3130781a224c57090"}, -] - -[package.extras] -docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] -testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "types-backports"] - [[package]] name = "exceptiongroup" version = "1.1.1" description = "Backport of PEP 654 (exception groups)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -262,7 +237,6 @@ test = ["pytest (>=6)"] name = "fastapi" version = "0.95.2" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -284,7 +258,6 @@ test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==23.1.0)", "coverage[toml] (>=6 name = "greenlet" version = "2.0.2" description = "Lightweight in-process concurrent programming" -category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" files = [ @@ -358,7 +331,6 @@ test = ["objgraph", "psutil"] name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -370,7 +342,6 @@ files = [ name = "httptools" version = "0.5.0" description = "A collection of framework independent HTTP protocol utils." -category = "main" optional = false python-versions = ">=3.5.0" files = [ @@ -424,7 +395,6 @@ test = ["Cython (>=0.29.24,<0.30.0)"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -436,7 +406,6 @@ files = [ name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -448,7 +417,6 @@ files = [ name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." -category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -466,7 +434,6 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "libcst" version = "0.4.9" description = "A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7, 3.8, 3.9, and 3.10 programs." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -514,7 +481,6 @@ dev = ["Sphinx (>=5.1.1)", "black (==22.10.0)", "coverage (>=4.5.4)", "fixit (== name = "mypy" version = "1.3.0" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -561,7 +527,6 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -573,7 +538,6 @@ files = [ name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -590,7 +554,6 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] name = "packaging" version = "23.1" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -602,7 +565,6 @@ files = [ name = "pluggy" version = "1.0.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -618,7 +580,6 @@ testing = ["pytest", "pytest-benchmark"] name = "py" version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -630,7 +591,6 @@ files = [ name = "pydantic" version = "1.10.8" description = "Data validation and settings management using python type hints" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -683,7 +643,6 @@ email = ["email-validator (>=1.0.3)"] name = "pytest" version = "6.2.5" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -708,7 +667,6 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm name = "pytest-mock" version = "3.10.0" description = "Thin-wrapper around the mock package for easier use with pytest" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -726,7 +684,6 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] name = "python-dotenv" version = "1.0.0" description = "Read key-value pairs from a .env file and set them as environment variables" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -741,7 +698,6 @@ cli = ["click (>=5.0)"] name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -791,7 +747,6 @@ files = [ name = "requests" version = "2.30.0" description = "Python HTTP for Humans." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -813,7 +768,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-oauthlib" version = "1.3.1" description = "OAuthlib authentication support for Requests." -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -832,7 +786,6 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] name = "ruff" version = "0.0.267" description = "An extremely fast Python linter, written in Rust." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -859,7 +812,6 @@ files = [ name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -871,7 +823,6 @@ files = [ name = "sqlalchemy" version = "2.0.13" description = "Database Abstraction Library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -919,7 +870,7 @@ files = [ ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\""} typing-extensions = ">=4.2.0" [package.extras] @@ -949,7 +900,6 @@ sqlcipher = ["sqlcipher3-binary"] name = "starlette" version = "0.27.0" description = "The little ASGI library that shines." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -967,7 +917,6 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyam name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -979,7 +928,6 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -991,7 +939,6 @@ files = [ name = "types-requests" version = "2.30.0.0" description = "Typing stubs for requests" -category = "dev" optional = false python-versions = "*" files = [ @@ -1006,7 +953,6 @@ types-urllib3 = "*" name = "types-urllib3" version = "1.26.25.13" description = "Typing stubs for urllib3" -category = "dev" optional = false python-versions = "*" files = [ @@ -1018,7 +964,6 @@ files = [ name = "typing-extensions" version = "4.5.0" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1030,7 +975,6 @@ files = [ name = "typing-inspect" version = "0.8.0" description = "Runtime inspection utilities for typing module." -category = "dev" optional = false python-versions = "*" files = [ @@ -1046,7 +990,6 @@ typing-extensions = ">=3.7.4" name = "urllib3" version = "1.26.15" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -1063,7 +1006,6 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "uvicorn" version = "0.22.0" description = "The lightning-fast ASGI server." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1078,7 +1020,7 @@ h11 = ">=0.8" httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} -uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} @@ -1089,7 +1031,6 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", name = "uvloop" version = "0.17.0" description = "Fast implementation of asyncio event loop on top of libuv" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1134,7 +1075,6 @@ test = ["Cython (>=0.29.32,<0.30.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "my name = "watchfiles" version = "0.19.0" description = "Simple, modern and high performance file watching and code reload in python." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1169,7 +1109,6 @@ anyio = ">=3.0.0" name = "websockets" version = "11.0.3" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1247,5 +1186,5 @@ files = [ [metadata] lock-version = "2.0" -python-versions = "^3.10" -content-hash = "2527044d5148da7759c10ba006c4eff6e46abab8188d31a5d02b085d96f3119e" +python-versions = ">=3.10.11,<3.11" +content-hash = "304c7a0b429d8c6e3bdef825d7ea9b7924bda7af0f8dd8736373b7ddbe64de13" diff --git a/api/pyproject.toml b/api/pyproject.toml index 3b677863..3a1e5727 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -7,7 +7,7 @@ license = "MIT" readme = "README.md" [tool.poetry.dependencies] -python = "^3.11" +python = ">=3.10.11,<3.11" # prod deps Fastapi = ">=0.95.2" uvicorn = { extras = ["standard"], version = "<1" } From eff75f980ea5f29c55425ab6a6fd27aee92e34f8 Mon Sep 17 00:00:00 2001 From: sanix-darker Date: Fri, 11 Aug 2023 21:55:23 +0200 Subject: [PATCH 03/15] feat(twitter): depreciation of twitter api integration --- api/.env.dist | 2 - api/Dockerfile | 2 +- api/Makefile | 2 +- api/app/main/controller/twitter_controller.py | 12 --- .../main/utils/database/twitter/top_tweets.py | 59 --------------- api/app/main/utils/database/users.py | 8 +- api/app/main/utils/dto.py | 1 - api/app/settings.py | 4 - api/tests/main/test_twitter_controller.py | 73 ------------------- docker-compose.yml | 11 ++- 10 files changed, 16 insertions(+), 158 deletions(-) delete mode 100644 api/app/main/controller/twitter_controller.py delete mode 100644 api/app/main/utils/database/twitter/top_tweets.py delete mode 100644 api/tests/main/test_twitter_controller.py diff --git a/api/.env.dist b/api/.env.dist index 34a7f946..923a0b56 100644 --- a/api/.env.dist +++ b/api/.env.dist @@ -10,9 +10,7 @@ DB_PORT = 5432 OSS_WEBSITE_APP_USER = "oss_website" OSS_WEBSITE_APP_PASSWORD = "password" OSS_WEBSITE_APP_DATABASE = "oss_website" - OSS_WEBSITE_SCHEMA = "oss_website" - OSS_WEBSITE_ADMIN_USER = "oss_website_admin" OSS_WEBSITE_ADMIN_PASSWORD = "password" diff --git a/api/Dockerfile b/api/Dockerfile index 3c8aa91f..afc6c03f 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.11-slim-buster +FROM python:3.10.11-slim-buster WORKDIR /app diff --git a/api/Makefile b/api/Makefile index d2368675..9c10d76f 100644 --- a/api/Makefile +++ b/api/Makefile @@ -58,6 +58,6 @@ lock: poetry.lock ##docker-build: build the api docker image docker-build: - docker build -t osswebsite:latest -f ./Dockerfile . + docker build -t oss-api:latest -f ./Dockerfile . .PHONY : help venv run install-deps test lint docker-build diff --git a/api/app/main/controller/twitter_controller.py b/api/app/main/controller/twitter_controller.py deleted file mode 100644 index 9168bc46..00000000 --- a/api/app/main/controller/twitter_controller.py +++ /dev/null @@ -1,12 +0,0 @@ -from app.main.utils.database.twitter.top_tweets import get_top_tweets -from app.main.utils.helpers.cache import Cache -from manage import app - -cache = Cache() - -# Ex : /top-tweets?count= -# Default count is 6 -@app.get("/top-tweets") -def top_tweets(count: int=6) -> dict: - """This method will return all top tweets""" - return get_top_tweets(cache, count) diff --git a/api/app/main/utils/database/twitter/top_tweets.py b/api/app/main/utils/database/twitter/top_tweets.py deleted file mode 100644 index 8bac5c24..00000000 --- a/api/app/main/utils/database/twitter/top_tweets.py +++ /dev/null @@ -1,59 +0,0 @@ -from typing import Any - -import requests -from requests_oauthlib import OAuth1 - -from app.main.utils.helpers.cache import Cache -from app.main.utils.helpers.commons import get_trace -from app.settings import API_KEY, API_SECRET_KEY - - -def top_tweets(cache: Cache, count: int) -> dict[str, Any] | None: - """ - This method will return top-tweets - comming from the request or just the cache - - params : the cache for the context - return boolean telling if everything went well [top-tweets] as string - """ - - # nothing inside top_tweets key - if not cache.get("top-tweets"): - try: - # we make another request - # to the twitter api - print(">> Hitting twitter api...") - - search_twitter_host = "https://api.twitter.com/1.1/search/tweets.json" - tweets = requests.get( - "{}?q=%23caparledev%20-filter%3Aretweets&count={}".format( - search_twitter_host, - str(count) - ), - auth=OAuth1(API_KEY, API_SECRET_KEY) - ).content.decode() - - # and we cache it as json string for 1h - cache.set("top-tweets", tweets, 3600) - except Exception: - # We just print the trace-back here - get_trace() - - return cache.get("top-tweets") - - -def get_top_tweets(cache: Cache, count: int) -> dict[str, Any]: - """ - This method will check the return of top-tweet and send - the appropriate status code for the request - - """ - - results = top_tweets(cache, count) - error = True if results is None or "errors" in results else False - - return { - "code": 500 if error else 200, - "status": "error" if error else "success", - "result": results if not error else {} - } diff --git a/api/app/main/utils/database/users.py b/api/app/main/utils/database/users.py index 384c85ca..826eb34e 100644 --- a/api/app/main/utils/database/users.py +++ b/api/app/main/utils/database/users.py @@ -1,5 +1,6 @@ # database utils functions +from typing import Any from app.main.utils import converters from app.main.utils.database import storage @@ -10,8 +11,7 @@ def sanitize_user_data(data): @params: data """ - data = converters.convert_datetime_fields_to_string(data) - return data + return converters.convert_datetime_fields_to_string(data) def sanitize_array_of_user_data(data_arr: list): @@ -24,7 +24,7 @@ def sanitize_array_of_user_data(data_arr: list): return data_arr -def get_users(count: int = 20): +def get_users(count: int = 20) -> dict[str, Any]: """ get_users [this function fetch dev users from the database] the count of items returned by this function can be limited @@ -54,7 +54,7 @@ def get_users(count: int = 20): return response -def get_user(user_name: str): +def get_user(user_name: str) -> dict[str, Any]: """ get_user[this method fetch dev user's information from the database] diff --git a/api/app/main/utils/dto.py b/api/app/main/utils/dto.py index 89c7f009..35b706bd 100755 --- a/api/app/main/utils/dto.py +++ b/api/app/main/utils/dto.py @@ -3,4 +3,3 @@ class ApiDto: github_api = Namespace('github', description='github related operations') - twitter_api = Namespace('twitter', description='twitter related operations') diff --git a/api/app/settings.py b/api/app/settings.py index 17c5b82a..d526e759 100644 --- a/api/app/settings.py +++ b/api/app/settings.py @@ -19,7 +19,3 @@ def get_conf(key: str, fallback: Any = "") -> str: OSS_WEBSITE_APP_PORT = get_conf('OSS_WEBSITE_APP_PORT', 5432) OSS_WEBSITE_APP_USER = get_conf('OSS_WEBSITE_APP_USER', 'user') OSS_WEBSITE_APP_PASSWORD = get_conf('OSS_WEBSITE_APP_PASSWORD', 'pwd') - -# Twitter configurations -TWITTER_API_KEY = get_conf("TWITTER_API_KEY") -TWITTER_API_SECRET_KEY = get_conf("TWITTER_API_SECRET_KEY") diff --git a/api/tests/main/test_twitter_controller.py b/api/tests/main/test_twitter_controller.py deleted file mode 100644 index 6409a9d6..00000000 --- a/api/tests/main/test_twitter_controller.py +++ /dev/null @@ -1,73 +0,0 @@ -import json - -import pytest -from flask import Flask - -from app.main import create_app - - -@pytest.fixture -def app() -> Flask: - return create_app( - 'dev' - ) - -def test_get_top_tweets(app: Flask) -> None: - client = app.test_client() - - response = client.get('/top-tweets') - - assert response.status_code == 200 - data = json.loads(response.data) - assert isinstance(data, dict) - assert "top_tweets" in data - assert isinstance(data["top_tweets"], list) - - -def test_get_top_tweets_with_custom_count(app: Flask) -> None: - client = app.test_client() - - response = client.get('/top-tweets?count=10') - - assert response.status_code == 200 - data = json.loads(response.data) - assert isinstance(data, dict) - assert "top_tweets" in data - assert isinstance(data["top_tweets"], list) - assert len(data["top_tweets"]) == 10 - - -def test_get_top_tweets_with_invalid_count(app: Flask) -> None: - client = app.test_client() - - response = client.get('/top-tweets?count=abc') - - assert response.status_code == 200 - data = json.loads(response.data) - assert isinstance(data, dict) - assert "error" in data - assert data["error"] == "Invalid count parameter" - - -def test_get_top_tweets_with_negative_count(app: Flask) -> None: - client = app.test_client() - - response = client.get('/top-tweets?count=-5') - - assert response.status_code == 200 - data = json.loads(response.data) - assert isinstance(data, dict) - assert "error" in data - assert data["error"] == "Invalid count parameter" - - -def test_get_top_tweets_with_zero_count(app: Flask) -> None: - client = app.test_client() - - response = client.get('/top-tweets?count=0') - - assert response.status_code == 200 - data = json.loads(response.data) - assert isinstance(data, dict) - assert "error" in data - assert data["error"] == "Invalid count parameter" diff --git a/docker-compose.yml b/docker-compose.yml index f4566470..08cd81db 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -3,7 +3,11 @@ version: '3.9' services: api: - build: ./api + build: + context: ../osscameroon-website/api/ + dockerfile: ./Dockerfile + image: oss-api:latest + container_name: oss-api-container environment: PGDATABASE: ossdb PGHOST: localhost @@ -51,6 +55,7 @@ services: frontend: build: ./frontend + container_name: oss-front depends_on: - api environment: @@ -58,3 +63,7 @@ services: REACT_ENV: development ports: - 3000:3000 + + +volumes: + pgadmin-data: From aa9cf0e144f11b10af8568acad59d433079023d5 Mon Sep 17 00:00:00 2001 From: sanix-darker Date: Fri, 11 Aug 2023 22:10:23 +0200 Subject: [PATCH 04/15] wip --- api/Dockerfile | 6 +++--- api/Makefile | 3 +++ api/manage.py | 4 ++-- docker-compose.yml | 6 ------ 4 files changed, 8 insertions(+), 11 deletions(-) diff --git a/api/Dockerfile b/api/Dockerfile index afc6c03f..0201052b 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,13 +1,13 @@ FROM python:3.10.11-slim-buster -WORKDIR /app +WORKDIR /src COPY pyproject.toml poetry.lock ./ RUN pip install -U pip poetry && poetry install -COPY . /app +COPY . . ENTRYPOINT ["/bin/sh", "-c"] # TODO: Use gunicorn for prod and add a gunicorn config file that can be overriden by a ConfigMap -CMD ["uvicorn oss_website.main:app --host 0.0.0.0 --port 80"] +CMD ["uvicorn manage:app --host 0.0.0.0 --port 80"] diff --git a/api/Makefile b/api/Makefile index 9c10d76f..0baf18cc 100644 --- a/api/Makefile +++ b/api/Makefile @@ -60,4 +60,7 @@ lock: poetry.lock docker-build: docker build -t oss-api:latest -f ./Dockerfile . +compose-run: + docker-compose run api + .PHONY : help venv run install-deps test lint docker-build diff --git a/api/manage.py b/api/manage.py index 615532da..a8dbe795 100644 --- a/api/manage.py +++ b/api/manage.py @@ -6,9 +6,9 @@ if __name__ == "__main__": uvicorn.run( - "oss_website:app", + "manage:app", host="0.0.0.0", - port=8000, + port=8811, log_level="info", reload=True ) diff --git a/docker-compose.yml b/docker-compose.yml index 08cd81db..2a20e118 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -8,12 +8,6 @@ services: dockerfile: ./Dockerfile image: oss-api:latest container_name: oss-api-container - environment: - PGDATABASE: ossdb - PGHOST: localhost - PGPASSWORD: pwd - PGPORT: 5432 - PGUSER: user ports: - 8811:8811 From 5812ac5400e126e547f54b38ca5d112397c2fefd Mon Sep 17 00:00:00 2001 From: sanix-darker Date: Fri, 11 Aug 2023 22:32:32 +0200 Subject: [PATCH 05/15] feat: api running inside docker --- api/Dockerfile | 6 ++++-- docker-compose.yml | 15 --------------- 2 files changed, 4 insertions(+), 17 deletions(-) diff --git a/api/Dockerfile b/api/Dockerfile index 0201052b..942281fd 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -3,11 +3,13 @@ FROM python:3.10.11-slim-buster WORKDIR /src COPY pyproject.toml poetry.lock ./ -RUN pip install -U pip poetry && poetry install +RUN pip install -U pip poetry + +RUN poetry config virtualenvs.create false && poetry install COPY . . ENTRYPOINT ["/bin/sh", "-c"] # TODO: Use gunicorn for prod and add a gunicorn config file that can be overriden by a ConfigMap -CMD ["uvicorn manage:app --host 0.0.0.0 --port 80"] +CMD ["python3 manage.py"] diff --git a/docker-compose.yml b/docker-compose.yml index 2a20e118..75196f7b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -32,21 +32,6 @@ services: retries: 4 start_period: 1s - pgadmin: - image: dpage/pgadmin4 - container_name: pgadmin4_container - restart: always - ports: - - "8080:80" - depends_on: - db: - condition: service_healthy - environment: - PGADMIN_DEFAULT_EMAIL: admin@admin.com - PGADMIN_DEFAULT_PASSWORD: admin - volumes: - - pgadmin-data:/var/lib/pgadmin - frontend: build: ./frontend container_name: oss-front From 684c6d3223891e7fe18b0ea549d2dc77f3751e57 Mon Sep 17 00:00:00 2001 From: sanix-darker Date: Fri, 11 Aug 2023 23:21:49 +0200 Subject: [PATCH 06/15] feat: working version (trying to connect the api to the database) --- api/.env.dist | 25 +++++------------- api/Makefile | 3 +++ api/app/main/controller/github_controller.py | 1 - .../main/utils/database/search_projects.py | 26 +++++-------------- api/app/main/utils/database/search_users.py | 25 +++++------------- api/app/settings.py | 20 ++++++++++---- api/example.config.txt | 10 ------- api/ossdb/users/import.sh | 2 +- docker-compose.yml | 6 +---- 9 files changed, 39 insertions(+), 79 deletions(-) delete mode 100644 api/example.config.txt diff --git a/api/.env.dist b/api/.env.dist index 923a0b56..ef21f6b5 100644 --- a/api/.env.dist +++ b/api/.env.dist @@ -1,26 +1,15 @@ -DEBUG = true +DEBUG = true # ~~~~~~ database stuff below ~~~~~~ -DB_HOST = localhost -DB_PORT = 5432 +DB_HOST = localhost +DB_PORT = 5432 # those vars are used by the init script of postgresql service # ONLY FOR DEV -OSS_WEBSITE_APP_USER = "oss_website" -OSS_WEBSITE_APP_PASSWORD = "password" -OSS_WEBSITE_APP_DATABASE = "oss_website" -OSS_WEBSITE_SCHEMA = "oss_website" -OSS_WEBSITE_ADMIN_USER = "oss_website_admin" -OSS_WEBSITE_ADMIN_PASSWORD = "password" - -# Those variables will be used in production environment -DATABASE_URL = "${OSS_WEBSITE_APP_USER}:${OSS_WEBSITE_APP_PASSWORD}@${DB_HOST}:${DB_PORT}/${OSS_WEBSITE_APP_DATABASE}" -DATABASE_URL_ADMIN = "${OSS_WEBSITE_ADMIN_USER}:${OSS_WEBSITE_ADMIN_PASSWORD}@${DB_HOST}:${DB_PORT}/${OSS_WEBSITE_APP_DATABASE}" +POSTGRES_USER = "user" +POSTGRES_PASSWORD = "pass" +POSTGRES_DB = "ossdb" # Auth options -TESTING = true - -# Twitter stuffs -TWITTER_API_KEY = "---------------------------" -TWITTER_API_SECRET_KEY = "---------------------------" +TESTING = true diff --git a/api/Makefile b/api/Makefile index 0baf18cc..5bbfd824 100644 --- a/api/Makefile +++ b/api/Makefile @@ -60,6 +60,9 @@ lock: poetry.lock docker-build: docker build -t oss-api:latest -f ./Dockerfile . +import-db: + # .... + compose-run: docker-compose run api diff --git a/api/app/main/controller/github_controller.py b/api/app/main/controller/github_controller.py index 4dea5afc..1601225a 100644 --- a/api/app/main/controller/github_controller.py +++ b/api/app/main/controller/github_controller.py @@ -11,7 +11,6 @@ from app.main.utils.database.users import get_user, get_users from manage import app - # Ex : /users?count= @app.get("/users") async def all_users(count: int=20) -> dict : diff --git a/api/app/main/utils/database/search_projects.py b/api/app/main/utils/database/search_projects.py index 01046299..c37776e0 100644 --- a/api/app/main/utils/database/search_projects.py +++ b/api/app/main/utils/database/search_projects.py @@ -1,23 +1,10 @@ # database utils functions -import asyncpg +from app.settings import create_connection -from app.settings import (OSS_WEBSITE_APP_DATABASE, OSS_WEBSITE_APP_HOST, - OSS_WEBSITE_APP_PASSWORD, OSS_WEBSITE_APP_PORT, - OSS_WEBSITE_APP_USER) - -async def create_pool(): - return await asyncpg.create_pool( - user=OSS_WEBSITE_APP_USER, - password=OSS_WEBSITE_APP_PASSWORD, - database=OSS_WEBSITE_APP_DATABASE, - host=OSS_WEBSITE_APP_HOST, - port=OSS_WEBSITE_APP_PORT - ) - -async def get_search_projects(pool, query: str, count: int = 20, page: int = 1): +async def get_search_projects(query: str, count: int = 20, page: int = 1): offset = (page - 1) * count - conn = await pool.acquire() + conn = await create_connection() try: ret = await conn.fetch( @@ -25,7 +12,7 @@ async def get_search_projects(pool, query: str, count: int = 20, page: int = 1): f"%{query}%", count, offset ) finally: - await pool.release(conn) + await conn.close() if not ret or len(ret) < 1: return {"code": 400, "reason": "nothing found"} @@ -39,7 +26,6 @@ async def get_search_projects(pool, query: str, count: int = 20, page: int = 1): return response async def post_search_projects( - pool, query: str, languages: list[str] = [], sort_type: str = "", @@ -47,7 +33,7 @@ async def post_search_projects( page: int = 1 ): offset = (page - 1) * count - conn = await pool.acquire() + conn = await create_connection() try: if sort_type == 'alphabetic': @@ -75,7 +61,7 @@ async def post_search_projects( f"%{query}%", languages, count, offset ) finally: - await pool.release(conn) + await conn.close() if not ret or len(ret) < 1: return {"code": 400, "reason": "nothing found"} diff --git a/api/app/main/utils/database/search_users.py b/api/app/main/utils/database/search_users.py index 101e7e8a..6b48c55d 100644 --- a/api/app/main/utils/database/search_users.py +++ b/api/app/main/utils/database/search_users.py @@ -1,23 +1,11 @@ # database utils functions -import asyncpg -from app.settings import (OSS_WEBSITE_APP_DATABASE, OSS_WEBSITE_APP_HOST, - OSS_WEBSITE_APP_PASSWORD, OSS_WEBSITE_APP_PORT, - OSS_WEBSITE_APP_USER) +from app.settings import create_connection -async def create_pool(): - return await asyncpg.create_pool( - user=OSS_WEBSITE_APP_USER, - password=OSS_WEBSITE_APP_PASSWORD, - database=OSS_WEBSITE_APP_DATABASE, - host=OSS_WEBSITE_APP_HOST, - port=OSS_WEBSITE_APP_PORT - ) - -async def get_search_users(pool, query: str, count: int = 20, page: int = 1): +async def get_search_users(query: str, count: int = 20, page: int = 1): offset = (page - 1) * count - conn = await pool.acquire() + conn = await create_connection() try: ret = await conn.fetch( @@ -25,7 +13,7 @@ async def get_search_users(pool, query: str, count: int = 20, page: int = 1): f"%{query}%", count, offset ) finally: - await pool.release(conn) + await conn.close() if not ret or len(ret) < 1: return {"code": 400, "reason": "nothing found"} @@ -39,14 +27,13 @@ async def get_search_users(pool, query: str, count: int = 20, page: int = 1): return response async def post_search_users( - pool, query: str, sort_type: str = "", count: int = 20, page: int = 1 ): offset = (page - 1) * count - conn = await pool.acquire() + conn = await create_connection() try: if sort_type == 'alphabetic': @@ -68,7 +55,7 @@ async def post_search_users( f"%{query}%", count, offset ) finally: - await pool.release(conn) + await conn.close() if not ret or len(ret) < 1: return {"code": 400, "reason": "nothing found"} diff --git a/api/app/settings.py b/api/app/settings.py index d526e759..f13a6f1d 100644 --- a/api/app/settings.py +++ b/api/app/settings.py @@ -2,6 +2,7 @@ # All settings/parameter for the application import os from typing import Any +import asyncpg def get_conf(key: str, fallback: Any = "") -> str: @@ -14,8 +15,17 @@ def get_conf(key: str, fallback: Any = "") -> str: return os.environ.get(key, default=fallback) # database configurations -OSS_WEBSITE_APP_DATABASE = get_conf('OSS_WEBSITE_APP_DATABASE', 'ossdb') -OSS_WEBSITE_APP_HOST = get_conf('OSS_WEBSITE_APP_HOST', "localhost") -OSS_WEBSITE_APP_PORT = get_conf('OSS_WEBSITE_APP_PORT', 5432) -OSS_WEBSITE_APP_USER = get_conf('OSS_WEBSITE_APP_USER', 'user') -OSS_WEBSITE_APP_PASSWORD = get_conf('OSS_WEBSITE_APP_PASSWORD', 'pwd') +OSS_WEBSITE_APP_USER = get_conf('POSTGRES_USER', 'user') +OSS_WEBSITE_APP_PASSWORD = get_conf('POSTGRES_PASSWORD', 'pwd') +OSS_WEBSITE_APP_DATABASE = get_conf('POSTGRES_DB', 'ossdb') +OSS_WEBSITE_APP_HOST = get_conf('DB_HOST', "localhost") +OSS_WEBSITE_APP_PORT = get_conf('DB_PORT', 5432) + +async def create_connection(): + return await asyncpg.connect( + user=OSS_WEBSITE_APP_USER, + password=OSS_WEBSITE_APP_PASSWORD, + database=OSS_WEBSITE_APP_DATABASE, + host=OSS_WEBSITE_APP_HOST, + port=OSS_WEBSITE_APP_PORT + ) diff --git a/api/example.config.txt b/api/example.config.txt deleted file mode 100644 index 47cd1851..00000000 --- a/api/example.config.txt +++ /dev/null @@ -1,10 +0,0 @@ -[postgres] -OSS_WEBSITE_APP_DATABASE = ossdb -OSS_WEBSITE_APP_HOST = localhost -OSS_WEBSITE_APP_PASSWORD = pwd -OSS_WEBSITE_APP_PORT = 5432 -OSS_WEBSITE_APP_USER = user - -[twitter] -API_KEY = ---------- -API_SECRET_KEY = ---------- diff --git a/api/ossdb/users/import.sh b/api/ossdb/users/import.sh index ae098088..801c7ccb 100644 --- a/api/ossdb/users/import.sh +++ b/api/ossdb/users/import.sh @@ -4,7 +4,7 @@ json_file="user_page_1.json" db_host="localhost" db_name="ossdb" db_user="user" -db_password="pwd" +db_password="pass" _import() { diff --git a/docker-compose.yml b/docker-compose.yml index 75196f7b..4943ec3a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -18,11 +18,7 @@ services: ports: - 5432:5432 command: ["postgres", "-c", "log_statement=all"] - environment: - OSS_WEBSITE_APP_USER: ${OSS_WEBSITE_APP_USER} - OSS_WEBSITE_APP_PASSWORD: ${OSS_WEBSITE_APP_PASSWORD} - OSS_WEBSITE_APP_DATABASE: ${OSS_WEBSITE_APP_DATABASE} - POSTGRES_PASSWORD: "admin" # postgres admin password + env_file: ./api/.env volumes: - ./scripts/initdb:/docker-entrypoint-initdb.d healthcheck: From 75a830d8cd951119d764e469c0523c79a946279b Mon Sep 17 00:00:00 2001 From: sanix-darker Date: Fri, 11 Aug 2023 23:29:12 +0200 Subject: [PATCH 07/15] feat: no warnings(env db not set) + clean and update of the app --- api/app/main/controller/github_controller.py | 8 ++++---- api/app/main/utils/database/search_projects.py | 9 +++------ api/app/main/utils/database/search_users.py | 8 ++------ api/app/main/utils/database/users.py | 8 ++------ docker-compose.yml | 2 +- 5 files changed, 12 insertions(+), 23 deletions(-) diff --git a/api/app/main/controller/github_controller.py b/api/app/main/controller/github_controller.py index 1601225a..59ae1a14 100644 --- a/api/app/main/controller/github_controller.py +++ b/api/app/main/controller/github_controller.py @@ -32,7 +32,7 @@ async def search_users(query: str, count: int=20, page: int=1) -> dict : This request will return the list of users that match the query string """ - return get_search_users( + return await get_search_users( query=query, count=count, page=page @@ -44,7 +44,7 @@ async def user_search_infos(request: Request) -> dict : """This request will return all github users that matches search query field""" request_json: dict[str, Any] = await request.json() or {} - return post_search_users( + return await post_search_users( query=request_json.get("query", ""), sort_type=request_json.get("sort_type", ""), page=request_json.get("page", 1), @@ -72,7 +72,7 @@ async def project_search(query: str, count: int=20, page: int=1) -> dict : This request will return all github projects that matches search query field """ - return get_search_projects( + return await get_search_projects( query=query, count=count, page=page @@ -87,7 +87,7 @@ async def project_search_infos(request: Request) -> dict : """ request_json = await request.json() - return post_search_projects( + return await post_search_projects( query=request_json.get("query", ""), sort_type=request_json.get("sort_type", ""), languages=request_json.get("languages", []), diff --git a/api/app/main/utils/database/search_projects.py b/api/app/main/utils/database/search_projects.py index c37776e0..e2058130 100644 --- a/api/app/main/utils/database/search_projects.py +++ b/api/app/main/utils/database/search_projects.py @@ -1,4 +1,5 @@ # database utils functions + from app.settings import create_connection @@ -17,14 +18,12 @@ async def get_search_projects(query: str, count: int = 20, page: int = 1): if not ret or len(ret) < 1: return {"code": 400, "reason": "nothing found"} - response = { + return { "code": 200, "status": "success", "result": ret, } - return response - async def post_search_projects( query: str, languages: list[str] = [], @@ -66,10 +65,8 @@ async def post_search_projects( if not ret or len(ret) < 1: return {"code": 400, "reason": "nothing found"} - response = { + return { "code": 200, "status": "success", "result": ret, } - - return response diff --git a/api/app/main/utils/database/search_users.py b/api/app/main/utils/database/search_users.py index 6b48c55d..5e47dc84 100644 --- a/api/app/main/utils/database/search_users.py +++ b/api/app/main/utils/database/search_users.py @@ -18,14 +18,12 @@ async def get_search_users(query: str, count: int = 20, page: int = 1): if not ret or len(ret) < 1: return {"code": 400, "reason": "nothing found"} - response = { + return { "code": 200, "status": "success", "result": ret, } - return response - async def post_search_users( query: str, sort_type: str = "", @@ -60,10 +58,8 @@ async def post_search_users( if not ret or len(ret) < 1: return {"code": 400, "reason": "nothing found"} - response = { + return { "code": 200, "status": "success", "result": ret, } - - return response diff --git a/api/app/main/utils/database/users.py b/api/app/main/utils/database/users.py index 826eb34e..cb07f755 100644 --- a/api/app/main/utils/database/users.py +++ b/api/app/main/utils/database/users.py @@ -45,14 +45,12 @@ def get_users(count: int = 20) -> dict[str, Any]: result = sanitize_array_of_user_data(result) - response = { + return { "code": 200, "status": "success", "result": result, } - return response - def get_user(user_name: str) -> dict[str, Any]: """ @@ -74,6 +72,4 @@ def get_user(user_name: str) -> dict[str, Any]: result = sanitize_user_data(result[0]) - response = {"code": 200, "status": "success", "result": result} - - return response + return {"code": 200, "status": "success", "result": result} diff --git a/docker-compose.yml b/docker-compose.yml index 4943ec3a..735bc2ef 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -22,7 +22,7 @@ services: volumes: - ./scripts/initdb:/docker-entrypoint-initdb.d healthcheck: - test: ["CMD", "psql", "postgresql://${OSS_WEBSITE_APP_USER}:${OSS_WEBSITE_APP_PASSWORD}@localhost/${OSS_WEBSITE_APP_DATABASE}"] + test: ["CMD", "psql", "postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@localhost/${POSTGRES_DB}"] interval: 0.5s timeout: 1s retries: 4 From 755dec006e8fa650567e8cccba294522289c9e39 Mon Sep 17 00:00:00 2001 From: sanix-darker Date: Sat, 12 Aug 2023 00:00:57 +0200 Subject: [PATCH 08/15] feat: add a status / check for the api --- api/app/main/__init__.py | 20 ++++++- api/app/main/controller/github_controller.py | 55 ++++++++++--------- api/app/main/utils/database/languages.py | 3 +- .../main/utils/database/search_projects.py | 14 ++--- api/app/main/utils/database/search_users.py | 9 +-- api/app/settings.py | 1 + docker-compose.yml | 4 +- 7 files changed, 64 insertions(+), 42 deletions(-) diff --git a/api/app/main/__init__.py b/api/app/main/__init__.py index 130d41f1..55478eee 100755 --- a/api/app/main/__init__.py +++ b/api/app/main/__init__.py @@ -1,5 +1,23 @@ +from typing import Any from fastapi import FastAPI +from app.main.controller.github_controller import github_router +async def status() -> dict[str, Any]: + return { + "status": "ok", + "version": "1.0", + "apis": [ + {"path": "api/v1"} + ] + } + def create_app(): - return FastAPI() + app = FastAPI(title='OssCameroon API', version="0.1") + app.include_router(github_router) + + # Add middleware/event_handler and everything else + + app.get("/")(status) + + return app diff --git a/api/app/main/controller/github_controller.py b/api/app/main/controller/github_controller.py index 59ae1a14..635cc6ca 100644 --- a/api/app/main/controller/github_controller.py +++ b/api/app/main/controller/github_controller.py @@ -1,37 +1,40 @@ from typing import Any -from fastapi import Request +from fastapi import APIRouter, Request from app.main.utils.database.languages import get_languages -from app.main.utils.database.projects import get_project, get_projects +# from app.main.utils.database.projects import get_project, get_projects from app.main.utils.database.search_projects import (get_search_projects, post_search_projects) from app.main.utils.database.search_users import (get_search_users, post_search_users) -from app.main.utils.database.users import get_user, get_users -from manage import app +# from app.main.utils.database.users import get_user, get_users + +github_router = APIRouter(prefix='/api/v1/github') # Ex : /users?count= -@app.get("/users") -async def all_users(count: int=20) -> dict : - """This method will return all github users with filter""" - return get_users(count) +# @github_router.get("/users") +# async def all_users(count: int=20) -> dict : +# """This method will return all github users with filter""" +# return get_users(count) -# Ex : /users/elhmne -@app.get("/users/") -async def user_infos_username(user_name: str) -> dict : - """This method will return a github user with more informations""" - return get_user(user_name) +# # Ex : /users/elhmne +# @github_router.get("/users/") +# async def user_infos_username(user_name: str) -> dict : +# """This method will return a github user with more informations""" +# return get_user(user_name) # Ex : /users/search?query=&count=&page= -@app.get("/users/search") +@github_router.get("/users/search") async def search_users(query: str, count: int=20, page: int=1) -> dict : """ This request will return the list of users that match the query string """ + + raise Exception("NOOOOOOOOOOOOOOOOOOOOOO2") return await get_search_users( query=query, count=count, @@ -39,7 +42,7 @@ async def search_users(query: str, count: int=20, page: int=1) -> dict : ) -@app.post("/users/search") +@github_router.post("/users/search") async def user_search_infos(request: Request) -> dict : """This request will return all github users that matches search query field""" request_json: dict[str, Any] = await request.json() or {} @@ -53,20 +56,20 @@ async def user_search_infos(request: Request) -> dict : # Ex : /projects?count= -@app.get("/projects") -async def all_projects(count: int=20) -> dict : - """This request will return all github projects""" - return get_projects(count) +# @github_router.get("/projects") +# async def all_projects(count: int=20) -> dict : +# """This request will return all github projects""" +# return get_projects(count) -@app.get("/projects") -async def user_infos_project(project_name: str) -> dict : - """This request will return a github project by name""" - return get_project(project_name) +# @github_router.get("/projects") +# async def user_infos_project(project_name: str) -> dict : +# """This request will return a github project by name""" +# return get_project(project_name) # Ex : /projects/search?query=&count=&page= -@app.get("/projects/search") +@github_router.get("/projects/search") async def project_search(query: str, count: int=20, page: int=1) -> dict : """ This request will return all github projects @@ -79,7 +82,7 @@ async def project_search(query: str, count: int=20, page: int=1) -> dict : ) -@app.post("/projects/search") +@github_router.post("/projects/search") async def project_search_infos(request: Request) -> dict : """ This request will return all github projects @@ -97,7 +100,7 @@ async def project_search_infos(request: Request) -> dict : # Ex : /languages -@app.get("/languages") +@github_router.get("/languages") async def github_languages() -> dict : """This request will return a list of github languages""" return get_languages() diff --git a/api/app/main/utils/database/languages.py b/api/app/main/utils/database/languages.py index 852d2079..b7984b74 100644 --- a/api/app/main/utils/database/languages.py +++ b/api/app/main/utils/database/languages.py @@ -354,5 +354,4 @@ def get_languages(): get_languages[this method returns github languages] """ - response = {"code": 200, "status": "success", "result": languages} - return response + return {"code": 200, "status": "success", "result": languages} diff --git a/api/app/main/utils/database/search_projects.py b/api/app/main/utils/database/search_projects.py index e2058130..3fadeeec 100644 --- a/api/app/main/utils/database/search_projects.py +++ b/api/app/main/utils/database/search_projects.py @@ -8,12 +8,12 @@ async def get_search_projects(query: str, count: int = 20, page: int = 1): conn = await create_connection() try: - ret = await conn.fetch( + ret = conn.fetch( 'SELECT * FROM projects WHERE name LIKE $1 LIMIT $2 OFFSET $3', f"%{query}%", count, offset ) finally: - await conn.close() + conn.close() if not ret or len(ret) < 1: return {"code": 400, "reason": "nothing found"} @@ -36,31 +36,31 @@ async def post_search_projects( try: if sort_type == 'alphabetic': - ret = await conn.fetch( + ret = conn.fetch( 'SELECT * FROM projects WHERE name LIKE $1 AND ' 'language = ANY($2) ORDER BY name LIMIT $3 OFFSET $4', f"%{query}%", languages, count, offset ) elif sort_type == 'most_recent': - ret = await conn.fetch( + ret = conn.fetch( 'SELECT * FROM projects WHERE name LIKE $1 AND ' 'language = ANY($2) ORDER BY created_at DESC LIMIT $3 OFFSET $4', f"%{query}%", languages, count, offset ) elif sort_type == 'popularity': - ret = await conn.fetch( + ret = conn.fetch( 'SELECT * FROM projects WHERE name LIKE $1 AND ' 'language = ANY($2) ORDER BY stargazers_count DESC LIMIT $3 OFFSET $4', f"%{query}%", languages, count, offset ) else: - ret = await conn.fetch( + ret = conn.fetch( 'SELECT * FROM projects WHERE name LIKE $1 AND ' 'language = ANY($2) LIMIT $3 OFFSET $4', f"%{query}%", languages, count, offset ) finally: - await conn.close() + conn.close() if not ret or len(ret) < 1: return {"code": 400, "reason": "nothing found"} diff --git a/api/app/main/utils/database/search_users.py b/api/app/main/utils/database/search_users.py index 5e47dc84..0149d22f 100644 --- a/api/app/main/utils/database/search_users.py +++ b/api/app/main/utils/database/search_users.py @@ -5,6 +5,7 @@ async def get_search_users(query: str, count: int = 20, page: int = 1): offset = (page - 1) * count + raise Exception("NOOOOOOOOOOOOOOOOOOOOOO1") conn = await create_connection() try: @@ -35,25 +36,25 @@ async def post_search_users( try: if sort_type == 'alphabetic': - ret = await conn.fetch( + ret = conn.fetch( 'SELECT * FROM users WHERE name LIKE $1 ' 'ORDER BY name LIMIT $2 OFFSET $3', f"%{query}%", count, offset ) elif sort_type == 'most_recent': - ret = await conn.fetch( + ret = conn.fetch( 'SELECT * FROM users WHERE name LIKE $1 ' 'ORDER BY created_at DESC LIMIT $2 OFFSET $3', f"%{query}%", count, offset ) else: - ret = await conn.fetch( + ret = conn.fetch( 'SELECT * FROM users WHERE name LIKE $1 ' 'LIMIT $2 OFFSET $3', f"%{query}%", count, offset ) finally: - await conn.close() + conn.close() if not ret or len(ret) < 1: return {"code": 400, "reason": "nothing found"} diff --git a/api/app/settings.py b/api/app/settings.py index f13a6f1d..0a92c53d 100644 --- a/api/app/settings.py +++ b/api/app/settings.py @@ -22,6 +22,7 @@ def get_conf(key: str, fallback: Any = "") -> str: OSS_WEBSITE_APP_PORT = get_conf('DB_PORT', 5432) async def create_connection(): + raise Exception('NOT WORKINGGGGGG !!') return await asyncpg.connect( user=OSS_WEBSITE_APP_USER, password=OSS_WEBSITE_APP_PASSWORD, diff --git a/docker-compose.yml b/docker-compose.yml index 735bc2ef..a1485677 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -7,13 +7,13 @@ services: context: ../osscameroon-website/api/ dockerfile: ./Dockerfile image: oss-api:latest - container_name: oss-api-container + container_name: oss-api ports: - 8811:8811 db: image: postgres - container_name: postgres_container + container_name: postgres restart: always ports: - 5432:5432 From 87bac505200286eea9fc93e39b32039529bda97a Mon Sep 17 00:00:00 2001 From: sanix-darker Date: Sun, 20 Aug 2023 15:16:44 +0200 Subject: [PATCH 09/15] feat: erase config.txt --- .gitignore | 1 + api/Makefile | 7 +------ api/app/main/__init__.py | 2 ++ 3 files changed, 4 insertions(+), 6 deletions(-) diff --git a/.gitignore b/.gitignore index ed327c64..243f3fde 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ # for database as json files api/ossdb/users/data/ api/ossdb/projects/data/ +Session.vim .history .idea diff --git a/api/Makefile b/api/Makefile index 5bbfd824..ddd98d9a 100644 --- a/api/Makefile +++ b/api/Makefile @@ -1,6 +1,5 @@ .DEFAULT_GOAL=help SHELL := /bin/bash -CONFIG_FILE=./config.txt VENVPATH=venv PY3=python3.11 PYTHON=$(VENVPATH)/bin/$(PY3) @@ -17,12 +16,8 @@ $(VENVPATH)/bin/activate: poetry.lock poetry install; \ touch $(VENVPATH)/bin/activate; -$(CONFIG_FILE): - echo "Adding config file..." - cp example.config.txt $(CONFIG_FILE) - ##install: setup your dev environment -install: venv $(CONFIG_FILE) +install: venv ##run: run the api locally run: install diff --git a/api/app/main/__init__.py b/api/app/main/__init__.py index 55478eee..e6415fdf 100755 --- a/api/app/main/__init__.py +++ b/api/app/main/__init__.py @@ -1,5 +1,7 @@ from typing import Any + from fastapi import FastAPI + from app.main.controller.github_controller import github_router From e83decc70d11052d8c163abe18825ddffcd3cc4e Mon Sep 17 00:00:00 2001 From: sanix-darker Date: Sun, 20 Aug 2023 15:17:02 +0200 Subject: [PATCH 10/15] feat: updates on the codebase - accept only query as a dict inside post request - remove some Exception from the code base --- api/app/main/controller/github_controller.py | 39 +++----------------- api/app/main/utils/database/search_users.py | 2 +- api/app/main/utils/database/users.py | 1 + api/app/settings.py | 3 +- api/pyproject.toml | 2 +- 5 files changed, 11 insertions(+), 36 deletions(-) diff --git a/api/app/main/controller/github_controller.py b/api/app/main/controller/github_controller.py index 635cc6ca..156f30ba 100644 --- a/api/app/main/controller/github_controller.py +++ b/api/app/main/controller/github_controller.py @@ -3,29 +3,13 @@ from fastapi import APIRouter, Request from app.main.utils.database.languages import get_languages -# from app.main.utils.database.projects import get_project, get_projects from app.main.utils.database.search_projects import (get_search_projects, post_search_projects) from app.main.utils.database.search_users import (get_search_users, post_search_users) -# from app.main.utils.database.users import get_user, get_users github_router = APIRouter(prefix='/api/v1/github') -# Ex : /users?count= -# @github_router.get("/users") -# async def all_users(count: int=20) -> dict : -# """This method will return all github users with filter""" -# return get_users(count) - - -# # Ex : /users/elhmne -# @github_router.get("/users/") -# async def user_infos_username(user_name: str) -> dict : -# """This method will return a github user with more informations""" -# return get_user(user_name) - - # Ex : /users/search?query=&count=&page= @github_router.get("/users/search") async def search_users(query: str, count: int=20, page: int=1) -> dict : @@ -33,8 +17,6 @@ async def search_users(query: str, count: int=20, page: int=1) -> dict : This request will return the list of users that match the query string """ - - raise Exception("NOOOOOOOOOOOOOOOOOOOOOO2") return await get_search_users( query=query, count=count, @@ -47,27 +29,16 @@ async def user_search_infos(request: Request) -> dict : """This request will return all github users that matches search query field""" request_json: dict[str, Any] = await request.json() or {} + assert "query" in request_json, "query is required for search/filtering" + return await post_search_users( - query=request_json.get("query", ""), + query=request_json["query"], sort_type=request_json.get("sort_type", ""), page=request_json.get("page", 1), count=request_json.get("count", 20) ) -# Ex : /projects?count= -# @github_router.get("/projects") -# async def all_projects(count: int=20) -> dict : -# """This request will return all github projects""" -# return get_projects(count) - - -# @github_router.get("/projects") -# async def user_infos_project(project_name: str) -> dict : -# """This request will return a github project by name""" -# return get_project(project_name) - - # Ex : /projects/search?query=&count=&page= @github_router.get("/projects/search") async def project_search(query: str, count: int=20, page: int=1) -> dict : @@ -90,8 +61,10 @@ async def project_search_infos(request: Request) -> dict : """ request_json = await request.json() + assert "query" in request_json, "query is required for search/filtering" + return await post_search_projects( - query=request_json.get("query", ""), + query=request_json["query"], sort_type=request_json.get("sort_type", ""), languages=request_json.get("languages", []), page=request_json.get("page", 1), diff --git a/api/app/main/utils/database/search_users.py b/api/app/main/utils/database/search_users.py index 0149d22f..71446f6c 100644 --- a/api/app/main/utils/database/search_users.py +++ b/api/app/main/utils/database/search_users.py @@ -5,7 +5,7 @@ async def get_search_users(query: str, count: int = 20, page: int = 1): offset = (page - 1) * count - raise Exception("NOOOOOOOOOOOOOOOOOOOOOO1") + # raise Exception("NOOOOOOOOOOOOOOOOOOOOOO1") conn = await create_connection() try: diff --git a/api/app/main/utils/database/users.py b/api/app/main/utils/database/users.py index cb07f755..a9c08297 100644 --- a/api/app/main/utils/database/users.py +++ b/api/app/main/utils/database/users.py @@ -1,6 +1,7 @@ # database utils functions from typing import Any + from app.main.utils import converters from app.main.utils.database import storage diff --git a/api/app/settings.py b/api/app/settings.py index 0a92c53d..20a05cd7 100644 --- a/api/app/settings.py +++ b/api/app/settings.py @@ -2,6 +2,7 @@ # All settings/parameter for the application import os from typing import Any + import asyncpg @@ -22,7 +23,7 @@ def get_conf(key: str, fallback: Any = "") -> str: OSS_WEBSITE_APP_PORT = get_conf('DB_PORT', 5432) async def create_connection(): - raise Exception('NOT WORKINGGGGGG !!') + # raise Exception('NOT WORKINGGGGGG !!') return await asyncpg.connect( user=OSS_WEBSITE_APP_USER, password=OSS_WEBSITE_APP_PASSWORD, diff --git a/api/pyproject.toml b/api/pyproject.toml index 3a1e5727..b3eeb446 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -7,7 +7,7 @@ license = "MIT" readme = "README.md" [tool.poetry.dependencies] -python = ">=3.10.11,<3.11" +python = ">=3.10.11,<3.12" # prod deps Fastapi = ">=0.95.2" uvicorn = { extras = ["standard"], version = "<1" } From 86fbeb13e6db6b2938b2d4a0b983f2c2099085d2 Mon Sep 17 00:00:00 2001 From: sanix-darker Date: Sun, 20 Aug 2023 16:06:50 +0200 Subject: [PATCH 11/15] feat: update on search user + clean controlers + add get_conection method --- api/app/main/controller/github_controller.py | 27 +++++++--- api/app/main/utils/database/search_users.py | 55 ++++++++++++++------ api/app/main/utils/database/users.py | 55 -------------------- api/app/settings.py | 25 +++++---- 4 files changed, 75 insertions(+), 87 deletions(-) diff --git a/api/app/main/controller/github_controller.py b/api/app/main/controller/github_controller.py index 156f30ba..7bdf58d1 100644 --- a/api/app/main/controller/github_controller.py +++ b/api/app/main/controller/github_controller.py @@ -5,13 +5,30 @@ from app.main.utils.database.languages import get_languages from app.main.utils.database.search_projects import (get_search_projects, post_search_projects) -from app.main.utils.database.search_users import (get_search_users, - post_search_users) +from app.main.utils.database.search_users import (get_search_users, get_user, + get_users, post_search_users) github_router = APIRouter(prefix='/api/v1/github') -# Ex : /users/search?query=&count=&page= -@github_router.get("/users/search") + +# Ex : /users?count= +@github_router.get("/users") +async def all_users(count: int=20) -> dict : + """This method will return all github users with filter""" + return await get_users(count) + + +# Ex : /users/elhmne +@github_router.get("/users/") +async def user_infos_username(user_name: str) -> dict : + """This method will return a github user with more informations""" + return await get_user(user_name) + + +@github_router.get( + "/users/search", + description="/users/search?query=&count=&page=" +) async def search_users(query: str, count: int=20, page: int=1) -> dict : """ This request will return the list of users that @@ -30,7 +47,6 @@ async def user_search_infos(request: Request) -> dict : request_json: dict[str, Any] = await request.json() or {} assert "query" in request_json, "query is required for search/filtering" - return await post_search_users( query=request_json["query"], sort_type=request_json.get("sort_type", ""), @@ -62,7 +78,6 @@ async def project_search_infos(request: Request) -> dict : request_json = await request.json() assert "query" in request_json, "query is required for search/filtering" - return await post_search_projects( query=request_json["query"], sort_type=request_json.get("sort_type", ""), diff --git a/api/app/main/utils/database/search_users.py b/api/app/main/utils/database/search_users.py index 71446f6c..4b35a562 100644 --- a/api/app/main/utils/database/search_users.py +++ b/api/app/main/utils/database/search_users.py @@ -1,16 +1,44 @@ # database utils functions -from app.settings import create_connection +from app.settings import create_connection, get_connection + + +async def get_user(username: str) -> dict: + conn = await create_connection() + + try: + ret = await conn.fetch( + 'SELECT * FROM users WHERE login LIKE $1', username + ) + finally: + await conn.close() + + if not ret or len(ret) < 1: + return {"code": 400, "reason": "nothing found"} + + return ret + +async def get_users(count: int) -> dict: + try: + ret = await get_connection().fetch( + 'SELECT * FROM users LIMIT $1', count + ) + finally: + await conn.close() + + if not ret or len(ret) < 1: + return {"code": 400, "reason": "nothing found"} + + return ret async def get_search_users(query: str, count: int = 20, page: int = 1): offset = (page - 1) * count - # raise Exception("NOOOOOOOOOOOOOOOOOOOOOO1") conn = await create_connection() try: ret = await conn.fetch( - 'SELECT * FROM users WHERE name LIKE $1 LIMIT $2 OFFSET $3', + 'SELECT * FROM users WHERE login LIKE $1 LIMIT $2 OFFSET $3', f"%{query}%", count, offset ) finally: @@ -19,11 +47,8 @@ async def get_search_users(query: str, count: int = 20, page: int = 1): if not ret or len(ret) < 1: return {"code": 400, "reason": "nothing found"} - return { - "code": 200, - "status": "success", - "result": ret, - } + return ret + async def post_search_users( query: str, @@ -37,19 +62,19 @@ async def post_search_users( try: if sort_type == 'alphabetic': ret = conn.fetch( - 'SELECT * FROM users WHERE name LIKE $1 ' - 'ORDER BY name LIMIT $2 OFFSET $3', + 'SELECT * FROM users WHERE login LIKE $1 ' + 'ORDER BY login LIMIT $2 OFFSET $3', f"%{query}%", count, offset ) elif sort_type == 'most_recent': ret = conn.fetch( - 'SELECT * FROM users WHERE name LIKE $1 ' + 'SELECT * FROM users WHERE login LIKE $1 ' 'ORDER BY created_at DESC LIMIT $2 OFFSET $3', f"%{query}%", count, offset ) else: ret = conn.fetch( - 'SELECT * FROM users WHERE name LIKE $1 ' + 'SELECT * FROM users WHERE login LIKE $1 ' 'LIMIT $2 OFFSET $3', f"%{query}%", count, offset ) @@ -59,8 +84,4 @@ async def post_search_users( if not ret or len(ret) < 1: return {"code": 400, "reason": "nothing found"} - return { - "code": 200, - "status": "success", - "result": ret, - } + return ret diff --git a/api/app/main/utils/database/users.py b/api/app/main/utils/database/users.py index a9c08297..e0f46ad4 100644 --- a/api/app/main/utils/database/users.py +++ b/api/app/main/utils/database/users.py @@ -1,10 +1,6 @@ # database utils functions -from typing import Any - from app.main.utils import converters -from app.main.utils.database import storage - def sanitize_user_data(data): """ @@ -23,54 +19,3 @@ def sanitize_array_of_user_data(data_arr: list): for data in data_arr: data = sanitize_user_data(data) return data_arr - - -def get_users(count: int = 20) -> dict[str, Any]: - """ - get_users [this function fetch dev users from the database] - the count of items returned by this function can be limited - to the size of data the datastore is able to return - - @params : count - @returns : - code : the status code of the request - - status the status string of the request - - result the result of that request - """ - - client = storage.get_client() - query = client.query(kind=storage.KIND_USERS) - result = list(query.fetch(limit=count)) - - if not result or len(result) < 1: - return {"code": 400, "reason": "nothing found"} - - result = sanitize_array_of_user_data(result) - - return { - "code": 200, - "status": "success", - "result": result, - } - - -def get_user(user_name: str) -> dict[str, Any]: - """ - get_user[this method fetch dev user's information - from the database] - - @params : user_name - @returns : Object reponse for the github user infos - - """ - - client = storage.get_client() - query = client.query(kind=storage.KIND_USERS) - query = query.add_filter("login", "=", user_name) - result = list(query.fetch()) - - if not result or len(result) < 1: - return {"code": 400, "reason": "nothing found"} - - result = sanitize_user_data(result[0]) - - return {"code": 200, "status": "success", "result": result} diff --git a/api/app/settings.py b/api/app/settings.py index 20a05cd7..a360295c 100644 --- a/api/app/settings.py +++ b/api/app/settings.py @@ -22,12 +22,19 @@ def get_conf(key: str, fallback: Any = "") -> str: OSS_WEBSITE_APP_HOST = get_conf('DB_HOST', "localhost") OSS_WEBSITE_APP_PORT = get_conf('DB_PORT', 5432) -async def create_connection(): - # raise Exception('NOT WORKINGGGGGG !!') - return await asyncpg.connect( - user=OSS_WEBSITE_APP_USER, - password=OSS_WEBSITE_APP_PASSWORD, - database=OSS_WEBSITE_APP_DATABASE, - host=OSS_WEBSITE_APP_HOST, - port=OSS_WEBSITE_APP_PORT - ) +__PG_CONNECTION = None + +async def get_connection(): + global __PG_CONNECTION + if __PG_CONNECTION is None: + __PG_CONNECTION = await asyncpg.connect( + user=OSS_WEBSITE_APP_USER, + password=OSS_WEBSITE_APP_PASSWORD, + database=OSS_WEBSITE_APP_DATABASE, + host=OSS_WEBSITE_APP_HOST, + port=OSS_WEBSITE_APP_PORT + ) + try: + yield __PG_CONNECTION + finally: + await __PG_CONNECTION.close() From 73d98be8d1c11c5eb8baaf60831d34da85cdcf99 Mon Sep 17 00:00:00 2001 From: sanix-darker Date: Sun, 20 Aug 2023 16:08:47 +0200 Subject: [PATCH 12/15] from create_connection to get_connection --- api/app/main/utils/database/search_projects.py | 6 +++--- api/app/main/utils/database/search_users.py | 11 ++++++----- api/app/main/utils/database/users.py | 1 + 3 files changed, 10 insertions(+), 8 deletions(-) diff --git a/api/app/main/utils/database/search_projects.py b/api/app/main/utils/database/search_projects.py index 3fadeeec..187d33ff 100644 --- a/api/app/main/utils/database/search_projects.py +++ b/api/app/main/utils/database/search_projects.py @@ -1,11 +1,11 @@ # database utils functions -from app.settings import create_connection +from app.settings import get_connection async def get_search_projects(query: str, count: int = 20, page: int = 1): offset = (page - 1) * count - conn = await create_connection() + conn = await get_connection() try: ret = conn.fetch( @@ -32,7 +32,7 @@ async def post_search_projects( page: int = 1 ): offset = (page - 1) * count - conn = await create_connection() + conn = await get_connection() try: if sort_type == 'alphabetic': diff --git a/api/app/main/utils/database/search_users.py b/api/app/main/utils/database/search_users.py index 4b35a562..f8e9cbd8 100644 --- a/api/app/main/utils/database/search_users.py +++ b/api/app/main/utils/database/search_users.py @@ -1,10 +1,10 @@ # database utils functions -from app.settings import create_connection, get_connection +from app.settings import get_connection async def get_user(username: str) -> dict: - conn = await create_connection() + conn = await get_connection() try: ret = await conn.fetch( @@ -19,8 +19,9 @@ async def get_user(username: str) -> dict: return ret async def get_users(count: int) -> dict: + conn = await get_connection() try: - ret = await get_connection().fetch( + ret = conn.fetch( 'SELECT * FROM users LIMIT $1', count ) finally: @@ -34,7 +35,7 @@ async def get_users(count: int) -> dict: async def get_search_users(query: str, count: int = 20, page: int = 1): offset = (page - 1) * count - conn = await create_connection() + conn = await get_connection() try: ret = await conn.fetch( @@ -57,7 +58,7 @@ async def post_search_users( page: int = 1 ): offset = (page - 1) * count - conn = await create_connection() + conn = await get_connection() try: if sort_type == 'alphabetic': diff --git a/api/app/main/utils/database/users.py b/api/app/main/utils/database/users.py index e0f46ad4..78e37ef8 100644 --- a/api/app/main/utils/database/users.py +++ b/api/app/main/utils/database/users.py @@ -2,6 +2,7 @@ from app.main.utils import converters + def sanitize_user_data(data): """ sanitize_user_data [prepare user data format] From db6a62ed0bea126d3c10920d67867d6e2091b761 Mon Sep 17 00:00:00 2001 From: sanix-darker Date: Sun, 20 Aug 2023 16:38:05 +0200 Subject: [PATCH 13/15] wip: cleaning + type checking + asycnpg-stubs --- api/app/main/config.py | 36 ----- api/app/main/utils/database/projects.py | 82 +++------- .../main/utils/database/search_projects.py | 2 + api/app/main/utils/database/storage.py | 12 -- api/app/main/utils/decorator.py | 43 ----- api/app/main/utils/dto.py | 5 - api/app/main/utils/helpers/cache.py | 2 +- api/app/settings.py | 29 ++-- api/poetry.lock | 102 +++++++----- api/pyproject.toml | 1 + api/tests/main/test_github_controller.py | 152 +++++++++--------- 11 files changed, 173 insertions(+), 293 deletions(-) delete mode 100755 api/app/main/config.py delete mode 100644 api/app/main/utils/database/storage.py delete mode 100755 api/app/main/utils/decorator.py delete mode 100755 api/app/main/utils/dto.py diff --git a/api/app/main/config.py b/api/app/main/config.py deleted file mode 100755 index 7e2b27f1..00000000 --- a/api/app/main/config.py +++ /dev/null @@ -1,36 +0,0 @@ -import os - -basedir = os.path.abspath(os.path.dirname(__file__)) - - -class Config: - SECRET_KEY = os.getenv('SECRET_KEY', 'my_precious_secret_key') - DEBUG = False - # app envs - APP_HOST = os.getenv('APP_HOST', '0.0.0.0') - APP_PORT = os.getenv('APP_PORT', '8811') - APP_VERSION = os.getenv('APP_VERSION', '0.0.1') - APP_NAME = os.getenv('APP_NAME', 'CaParleDev-WebSite') - -class DevelopmentConfig(Config): - DEBUG = True - - -class TestingConfig(Config): - DEBUG = True - TESTING = True - - -class ProductionConfig(Config): - DEBUG = False - - -config_by_name = dict( - dev=DevelopmentConfig, - test=TestingConfig, - prod=ProductionConfig -) - -key = Config.SECRET_KEY -app_port = Config.APP_PORT -app_host = Config.APP_HOST diff --git a/api/app/main/utils/database/projects.py b/api/app/main/utils/database/projects.py index 11d5fcb4..1d0fdef9 100644 --- a/api/app/main/utils/database/projects.py +++ b/api/app/main/utils/database/projects.py @@ -1,7 +1,7 @@ # database utils functions from app.main.utils import converters -from app.main.utils.database import storage +from app.settings import get_connection def sanitize_project_data(data): @@ -10,8 +10,7 @@ def sanitize_project_data(data): @params: data """ - data = converters.convert_datetime_fields_to_string(data) - return data + return converters.convert_datetime_fields_to_string(data) def sanitize_array_of_project_data(data_arr: list): @@ -24,70 +23,33 @@ def sanitize_array_of_project_data(data_arr: list): return data_arr -def get_one_page_of_projects(cursor=None, limit: int = 20): - client = storage.get_client() - query = client.query(kind=storage.KIND_PROJECTS) - query_iter = query.fetch(start_cursor=cursor, limit=limit) - page = next(query_iter.pages) +async def get_project(projectname: str) -> dict: + conn = await get_connection() - result = list(page) - result = sanitize_array_of_project_data(result) + try: + ret = await conn.fetch( + 'SELECT * FROM projects WHERE name = $1', projectname + ) + finally: + await conn.close() - next_cursor = query_iter.next_page_token - return result, next_cursor - - -def get_projects(count: int = 20): - """ - get_users [this function fetch open - source projects from the database] - the count of items returned by this function - can be limited to the size of data - the datastore is able to return - - @params : count - @returns : - code : the status code of the request - - status the status string of the request - - result the result of that request - """ - - client = storage.get_client() - query = client.query(kind=storage.KIND_PROJECTS) - result = list(query.fetch(limit=count)) - - if not result or len(result) < 1: + if not ret or len(ret) < 1: return {"code": 400, "reason": "nothing found"} - result = sanitize_array_of_project_data(result) + return ret - response = { - "code": 200, - "status": "success", - "result": result, - } - return response +async def get_projects(offset: int, limit: int) -> dict: + conn = await get_connection() -def get_project(project_name: str): - """ - get_user[this method fetch dev user's information - from the database] + try: + ret = conn.fetch( + 'SELECT * FROM projects LIMIT $1 OFFSET $2', limit, offset + ) + finally: + await conn.close() - @params : user_name - @returns : Object reponse for the github user infos - - """ - - client = storage.get_client() - query = client.query(kind=storage.KIND_PROJECTS) - query = query.add_filter("name", "=", project_name) - result = list(query.fetch()) - - if not result or len(result) < 1: + if not ret or len(ret) < 1: return {"code": 400, "reason": "nothing found"} - result = sanitize_project_data(result[0]) - - response = {"code": 200, "status": "success", "result": result} - - return response + return ret diff --git a/api/app/main/utils/database/search_projects.py b/api/app/main/utils/database/search_projects.py index 187d33ff..76bcc379 100644 --- a/api/app/main/utils/database/search_projects.py +++ b/api/app/main/utils/database/search_projects.py @@ -7,6 +7,8 @@ async def get_search_projects(query: str, count: int = 20, page: int = 1): offset = (page - 1) * count conn = await get_connection() + assert conn is not None + try: ret = conn.fetch( 'SELECT * FROM projects WHERE name LIKE $1 LIMIT $2 OFFSET $3', diff --git a/api/app/main/utils/database/storage.py b/api/app/main/utils/database/storage.py deleted file mode 100644 index 924370e7..00000000 --- a/api/app/main/utils/database/storage.py +++ /dev/null @@ -1,12 +0,0 @@ -from google.cloud import datastore - -KIND_USERS = "github_users" -KIND_PROJECTS = "github_projects" -__CLIENT = None - - -def get_client(): - global __CLIENT - if __CLIENT is None: - __CLIENT = datastore.Client() - return __CLIENT diff --git a/api/app/main/utils/decorator.py b/api/app/main/utils/decorator.py deleted file mode 100755 index 20bb261b..00000000 --- a/api/app/main/utils/decorator.py +++ /dev/null @@ -1,43 +0,0 @@ -# from functools import wraps -# -# from flask import request -# -# from app.main.service.auth_helper import Auth -# -# -# def token_required(f): -# @wraps(f) -# def decorated(*args, **kwargs): -# -# data, status = Auth.get_logged_in_user(request) -# token = data.get('data') -# -# if not token: -# return data, status -# -# return f(*args, **kwargs) -# -# return decorated - - -# def admin_token_required(f): -# @wraps(f) -# def decorated(*args, **kwargs): - -# data, status = Auth.get_logged_in_user(request) -# token = data.get('data') - -# if not token: -# return data, status - -# admin = token.get('admin') -# if not admin: -# response_object = { -# 'status': 'fail', -# 'message': 'admin token required' -# } -# return response_object, 401 - -# return f(*args, **kwargs) - -# return decorated diff --git a/api/app/main/utils/dto.py b/api/app/main/utils/dto.py deleted file mode 100755 index 35b706bd..00000000 --- a/api/app/main/utils/dto.py +++ /dev/null @@ -1,5 +0,0 @@ -from flask_restplus import Namespace - - -class ApiDto: - github_api = Namespace('github', description='github related operations') diff --git a/api/app/main/utils/helpers/cache.py b/api/app/main/utils/helpers/cache.py index 0a235625..6e062b83 100644 --- a/api/app/main/utils/helpers/cache.py +++ b/api/app/main/utils/helpers/cache.py @@ -6,7 +6,7 @@ class Cache(object): - _cache_ = {} + _cache_: dict = {} VALUE = 0 EXPIRES = 1 diff --git a/api/app/settings.py b/api/app/settings.py index a360295c..9fde57a0 100644 --- a/api/app/settings.py +++ b/api/app/settings.py @@ -5,6 +5,7 @@ import asyncpg +__PG_CONNECTION = None def get_conf(key: str, fallback: Any = "") -> str: """ @@ -15,26 +16,18 @@ def get_conf(key: str, fallback: Any = "") -> str: return os.environ.get(key, default=fallback) -# database configurations -OSS_WEBSITE_APP_USER = get_conf('POSTGRES_USER', 'user') -OSS_WEBSITE_APP_PASSWORD = get_conf('POSTGRES_PASSWORD', 'pwd') -OSS_WEBSITE_APP_DATABASE = get_conf('POSTGRES_DB', 'ossdb') -OSS_WEBSITE_APP_HOST = get_conf('DB_HOST', "localhost") -OSS_WEBSITE_APP_PORT = get_conf('DB_PORT', 5432) - -__PG_CONNECTION = None - async def get_connection(): global __PG_CONNECTION + if __PG_CONNECTION is None: __PG_CONNECTION = await asyncpg.connect( - user=OSS_WEBSITE_APP_USER, - password=OSS_WEBSITE_APP_PASSWORD, - database=OSS_WEBSITE_APP_DATABASE, - host=OSS_WEBSITE_APP_HOST, - port=OSS_WEBSITE_APP_PORT + user=get_conf('POSTGRES_USER', 'user'), + password=get_conf('POSTGRES_PASSWORD', 'pwd'), + database=get_conf('POSTGRES_DB', 'ossdb'), + host=get_conf('DB_HOST', 'localhost'), + port=get_conf('DB_PORT', 5432) ) - try: - yield __PG_CONNECTION - finally: - await __PG_CONNECTION.close() + try: + yield __PG_CONNECTION + finally: + await __PG_CONNECTION.close() #type:ignore diff --git a/api/poetry.lock b/api/poetry.lock index b7709f02..66711098 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -23,53 +23,71 @@ trio = ["trio (<0.22)"] [[package]] name = "asyncpg" -version = "0.27.0" +version = "0.28.0" description = "An asyncio PostgreSQL driver" optional = false python-versions = ">=3.7.0" files = [ - {file = "asyncpg-0.27.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fca608d199ffed4903dce1bcd97ad0fe8260f405c1c225bdf0002709132171c2"}, - {file = "asyncpg-0.27.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:20b596d8d074f6f695c13ffb8646d0b6bb1ab570ba7b0cfd349b921ff03cfc1e"}, - {file = "asyncpg-0.27.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a6206210c869ebd3f4eb9e89bea132aefb56ff3d1b7dd7e26b102b17e27bbb1"}, - {file = "asyncpg-0.27.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a7a94c03386bb95456b12c66026b3a87d1b965f0f1e5733c36e7229f8f137747"}, - {file = "asyncpg-0.27.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bfc3980b4ba6f97138b04f0d32e8af21d6c9fa1f8e6e140c07d15690a0a99279"}, - {file = "asyncpg-0.27.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9654085f2b22f66952124de13a8071b54453ff972c25c59b5ce1173a4283ffd9"}, - {file = "asyncpg-0.27.0-cp310-cp310-win32.whl", hash = "sha256:879c29a75969eb2722f94443752f4720d560d1e748474de54ae8dd230bc4956b"}, - {file = "asyncpg-0.27.0-cp310-cp310-win_amd64.whl", hash = "sha256:ab0f21c4818d46a60ca789ebc92327d6d874d3b7ccff3963f7af0a21dc6cff52"}, - {file = "asyncpg-0.27.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:18f77e8e71e826ba2d0c3ba6764930776719ae2b225ca07e014590545928b576"}, - {file = "asyncpg-0.27.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c2232d4625c558f2aa001942cac1d7952aa9f0dbfc212f63bc754277769e1ef2"}, - {file = "asyncpg-0.27.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9a3a4ff43702d39e3c97a8786314123d314e0f0e4dabc8367db5b665c93914de"}, - {file = "asyncpg-0.27.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccddb9419ab4e1c48742457d0c0362dbdaeb9b28e6875115abfe319b29ee225d"}, - {file = "asyncpg-0.27.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:768e0e7c2898d40b16d4ef7a0b44e8150db3dd8995b4652aa1fe2902e92c7df8"}, - {file = "asyncpg-0.27.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:609054a1f47292a905582a1cfcca51a6f3f30ab9d822448693e66fdddde27920"}, - {file = "asyncpg-0.27.0-cp311-cp311-win32.whl", hash = "sha256:8113e17cfe236dc2277ec844ba9b3d5312f61bd2fdae6d3ed1c1cdd75f6cf2d8"}, - {file = "asyncpg-0.27.0-cp311-cp311-win_amd64.whl", hash = "sha256:bb71211414dd1eeb8d31ec529fe77cff04bf53efc783a5f6f0a32d84923f45cf"}, - {file = "asyncpg-0.27.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4750f5cf49ed48a6e49c6e5aed390eee367694636c2dcfaf4a273ca832c5c43c"}, - {file = "asyncpg-0.27.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:eca01eb112a39d31cc4abb93a5aef2a81514c23f70956729f42fb83b11b3483f"}, - {file = "asyncpg-0.27.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5710cb0937f696ce303f5eed6d272e3f057339bb4139378ccecafa9ee923a71c"}, - {file = "asyncpg-0.27.0-cp37-cp37m-win_amd64.whl", hash = "sha256:71cca80a056ebe19ec74b7117b09e650990c3ca535ac1c35234a96f65604192f"}, - {file = "asyncpg-0.27.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4bb366ae34af5b5cabc3ac6a5347dfb6013af38c68af8452f27968d49085ecc0"}, - {file = "asyncpg-0.27.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:16ba8ec2e85d586b4a12bcd03e8d29e3d99e832764d6a1d0b8c27dbbe4a2569d"}, - {file = "asyncpg-0.27.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d20dea7b83651d93b1eb2f353511fe7fd554752844523f17ad30115d8b9c8cd6"}, - {file = "asyncpg-0.27.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e56ac8a8237ad4adec97c0cd4728596885f908053ab725e22900b5902e7f8e69"}, - {file = "asyncpg-0.27.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bf21ebf023ec67335258e0f3d3ad7b91bb9507985ba2b2206346de488267cad0"}, - {file = "asyncpg-0.27.0-cp38-cp38-win32.whl", hash = "sha256:69aa1b443a182b13a17ff926ed6627af2d98f62f2fe5890583270cc4073f63bf"}, - {file = "asyncpg-0.27.0-cp38-cp38-win_amd64.whl", hash = "sha256:62932f29cf2433988fcd799770ec64b374a3691e7902ecf85da14d5e0854d1ea"}, - {file = "asyncpg-0.27.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fddcacf695581a8d856654bc4c8cfb73d5c9df26d5f55201722d3e6a699e9629"}, - {file = "asyncpg-0.27.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7d8585707ecc6661d07367d444bbaa846b4e095d84451340da8df55a3757e152"}, - {file = "asyncpg-0.27.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:975a320baf7020339a67315284a4d3bf7460e664e484672bd3e71dbd881bc692"}, - {file = "asyncpg-0.27.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2232ebae9796d4600a7819fc383da78ab51b32a092795f4555575fc934c1c89d"}, - {file = "asyncpg-0.27.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:88b62164738239f62f4af92567b846a8ef7cf8abf53eddd83650603de4d52163"}, - {file = "asyncpg-0.27.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:eb4b2fdf88af4fb1cc569781a8f933d2a73ee82cd720e0cb4edabbaecf2a905b"}, - {file = "asyncpg-0.27.0-cp39-cp39-win32.whl", hash = "sha256:8934577e1ed13f7d2d9cea3cc016cc6f95c19faedea2c2b56a6f94f257cea672"}, - {file = "asyncpg-0.27.0-cp39-cp39-win_amd64.whl", hash = "sha256:1b6499de06fe035cf2fa932ec5617ed3f37d4ebbf663b655922e105a484a6af9"}, - {file = "asyncpg-0.27.0.tar.gz", hash = "sha256:720986d9a4705dd8a40fdf172036f5ae787225036a7eb46e704c45aa8f62c054"}, + {file = "asyncpg-0.28.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a6d1b954d2b296292ddff4e0060f494bb4270d87fb3655dd23c5c6096d16d83"}, + {file = "asyncpg-0.28.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0740f836985fd2bd73dca42c50c6074d1d61376e134d7ad3ad7566c4f79f8184"}, + {file = "asyncpg-0.28.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e907cf620a819fab1737f2dd90c0f185e2a796f139ac7de6aa3212a8af96c050"}, + {file = "asyncpg-0.28.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b339984d55e8202e0c4b252e9573e26e5afa05617ed02252544f7b3e6de3e9"}, + {file = "asyncpg-0.28.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c402745185414e4c204a02daca3d22d732b37359db4d2e705172324e2d94e85"}, + {file = "asyncpg-0.28.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c88eef5e096296626e9688f00ab627231f709d0e7e3fb84bb4413dff81d996d7"}, + {file = "asyncpg-0.28.0-cp310-cp310-win32.whl", hash = "sha256:90a7bae882a9e65a9e448fdad3e090c2609bb4637d2a9c90bfdcebbfc334bf89"}, + {file = "asyncpg-0.28.0-cp310-cp310-win_amd64.whl", hash = "sha256:76aacdcd5e2e9999e83c8fbcb748208b60925cc714a578925adcb446d709016c"}, + {file = "asyncpg-0.28.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a0e08fe2c9b3618459caaef35979d45f4e4f8d4f79490c9fa3367251366af207"}, + {file = "asyncpg-0.28.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b24e521f6060ff5d35f761a623b0042c84b9c9b9fb82786aadca95a9cb4a893b"}, + {file = "asyncpg-0.28.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99417210461a41891c4ff301490a8713d1ca99b694fef05dabd7139f9d64bd6c"}, + {file = "asyncpg-0.28.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f029c5adf08c47b10bcdc857001bbef551ae51c57b3110964844a9d79ca0f267"}, + {file = "asyncpg-0.28.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ad1d6abf6c2f5152f46fff06b0e74f25800ce8ec6c80967f0bc789974de3c652"}, + {file = "asyncpg-0.28.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d7fa81ada2807bc50fea1dc741b26a4e99258825ba55913b0ddbf199a10d69d8"}, + {file = "asyncpg-0.28.0-cp311-cp311-win32.whl", hash = "sha256:f33c5685e97821533df3ada9384e7784bd1e7865d2b22f153f2e4bd4a083e102"}, + {file = "asyncpg-0.28.0-cp311-cp311-win_amd64.whl", hash = "sha256:5e7337c98fb493079d686a4a6965e8bcb059b8e1b8ec42106322fc6c1c889bb0"}, + {file = "asyncpg-0.28.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1c56092465e718a9fdcc726cc3d9dcf3a692e4834031c9a9f871d92a75d20d48"}, + {file = "asyncpg-0.28.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4acd6830a7da0eb4426249d71353e8895b350daae2380cb26d11e0d4a01c5472"}, + {file = "asyncpg-0.28.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63861bb4a540fa033a56db3bb58b0c128c56fad5d24e6d0a8c37cb29b17c1c7d"}, + {file = "asyncpg-0.28.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a93a94ae777c70772073d0512f21c74ac82a8a49be3a1d982e3f259ab5f27307"}, + {file = "asyncpg-0.28.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d14681110e51a9bc9c065c4e7944e8139076a778e56d6f6a306a26e740ed86d2"}, + {file = "asyncpg-0.28.0-cp37-cp37m-win32.whl", hash = "sha256:8aec08e7310f9ab322925ae5c768532e1d78cfb6440f63c078b8392a38aa636a"}, + {file = "asyncpg-0.28.0-cp37-cp37m-win_amd64.whl", hash = "sha256:319f5fa1ab0432bc91fb39b3960b0d591e6b5c7844dafc92c79e3f1bff96abef"}, + {file = "asyncpg-0.28.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b337ededaabc91c26bf577bfcd19b5508d879c0ad009722be5bb0a9dd30b85a0"}, + {file = "asyncpg-0.28.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4d32b680a9b16d2957a0a3cc6b7fa39068baba8e6b728f2e0a148a67644578f4"}, + {file = "asyncpg-0.28.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f62f04cdf38441a70f279505ef3b4eadf64479b17e707c950515846a2df197"}, + {file = "asyncpg-0.28.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f20cac332c2576c79c2e8e6464791c1f1628416d1115935a34ddd7121bfc6a4"}, + {file = "asyncpg-0.28.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:59f9712ce01e146ff71d95d561fb68bd2d588a35a187116ef05028675462d5ed"}, + {file = "asyncpg-0.28.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fc9e9f9ff1aa0eddcc3247a180ac9e9b51a62311e988809ac6152e8fb8097756"}, + {file = "asyncpg-0.28.0-cp38-cp38-win32.whl", hash = "sha256:9e721dccd3838fcff66da98709ed884df1e30a95f6ba19f595a3706b4bc757e3"}, + {file = "asyncpg-0.28.0-cp38-cp38-win_amd64.whl", hash = "sha256:8ba7d06a0bea539e0487234511d4adf81dc8762249858ed2a580534e1720db00"}, + {file = "asyncpg-0.28.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d009b08602b8b18edef3a731f2ce6d3f57d8dac2a0a4140367e194eabd3de457"}, + {file = "asyncpg-0.28.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ec46a58d81446d580fb21b376ec6baecab7288ce5a578943e2fc7ab73bf7eb39"}, + {file = "asyncpg-0.28.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b48ceed606cce9e64fd5480a9b0b9a95cea2b798bb95129687abd8599c8b019"}, + {file = "asyncpg-0.28.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8858f713810f4fe67876728680f42e93b7e7d5c7b61cf2118ef9153ec16b9423"}, + {file = "asyncpg-0.28.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5e18438a0730d1c0c1715016eacda6e9a505fc5aa931b37c97d928d44941b4bf"}, + {file = "asyncpg-0.28.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e9c433f6fcdd61c21a715ee9128a3ca48be8ac16fa07be69262f016bb0f4dbd2"}, + {file = "asyncpg-0.28.0-cp39-cp39-win32.whl", hash = "sha256:41e97248d9076bc8e4849da9e33e051be7ba37cd507cbd51dfe4b2d99c70e3dc"}, + {file = "asyncpg-0.28.0-cp39-cp39-win_amd64.whl", hash = "sha256:3ed77f00c6aacfe9d79e9eff9e21729ce92a4b38e80ea99a58ed382f42ebd55b"}, + {file = "asyncpg-0.28.0.tar.gz", hash = "sha256:7252cdc3acb2f52feaa3664280d3bcd78a46bd6c10bfd681acfffefa1120e278"}, ] [package.extras] -dev = ["Cython (>=0.29.24,<0.30.0)", "Sphinx (>=4.1.2,<4.2.0)", "flake8 (>=5.0.4,<5.1.0)", "pytest (>=6.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)", "uvloop (>=0.15.3)"] -docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] -test = ["flake8 (>=5.0.4,<5.1.0)", "uvloop (>=0.15.3)"] +docs = ["Sphinx (>=5.3.0,<5.4.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["flake8 (>=5.0,<6.0)", "uvloop (>=0.15.3)"] + +[[package]] +name = "asyncpg-stubs" +version = "0.28.0" +description = "asyncpg stubs" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "asyncpg_stubs-0.28.0-py3-none-any.whl", hash = "sha256:db89ae4ec715e8ba5ba6f103b1e322ff101f39940333a0f68dc22f57ecaeda82"}, + {file = "asyncpg_stubs-0.28.0.tar.gz", hash = "sha256:dd3a93e153046a92fb85d704457fe6020a3db3865308a796419e74736d70d167"}, +] + +[package.dependencies] +asyncpg = ">=0.28,<0.29" +typing-extensions = ">=4.2.0,<5.0.0" [[package]] name = "atomicwrites" @@ -1186,5 +1204,5 @@ files = [ [metadata] lock-version = "2.0" -python-versions = ">=3.10.11,<3.11" -content-hash = "304c7a0b429d8c6e3bdef825d7ea9b7924bda7af0f8dd8736373b7ddbe64de13" +python-versions = ">=3.10.11,<3.12" +content-hash = "1c56ef9018b9bf82af0679ab53481270c15b43bbe707bfce533d766fae8b023f" diff --git a/api/pyproject.toml b/api/pyproject.toml index b3eeb446..f4b104a0 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -27,6 +27,7 @@ libcst = ">=0.4.9" # typing stub libs types-requests = ">=2.30.0.0" mypy-extensions = ">=1.0.0" +asyncpg-stubs = "^0.28.0" [build-system] diff --git a/api/tests/main/test_github_controller.py b/api/tests/main/test_github_controller.py index 2dff0bea..3b3c8e9d 100644 --- a/api/tests/main/test_github_controller.py +++ b/api/tests/main/test_github_controller.py @@ -1,117 +1,117 @@ -import json +# import json -import pytest -from flask import Flask +# import pytest +# from flask import Flask -from app.main import create_app +# from app.main import create_app -@pytest.fixture -def app() -> Flask: - return create_app( - 'dev' - ) +# @pytest.fixture +# def app() -> Flask: +# return create_app( +# 'dev' +# ) -def test_get_all_users(app: Flask) -> None: - client = app.test_client() +# def test_get_all_users(app: Flask) -> None: +# client = app.test_client() - response = client.get('/users') +# response = client.get('/users') - assert response.status_code == 200 - data = json.loads(response.data) - assert isinstance(data, list) +# assert response.status_code == 200 +# data = json.loads(response.data) +# assert isinstance(data, list) -def test_get_user(app: Flask) -> None: - client = app.test_client() +# def test_get_user(app: Flask) -> None: +# client = app.test_client() - response = client.get('/users/elhmne') +# response = client.get('/users/elhmne') - assert response.status_code == 200 - data = json.loads(response.data) - assert isinstance(data, dict) +# assert response.status_code == 200 +# data = json.loads(response.data) +# assert isinstance(data, dict) -def test_search_users(app: Flask) -> None: - client = app.test_client() +# def test_search_users(app: Flask) -> None: +# client = app.test_client() - response = client.get('/users/search?query=test') +# response = client.get('/users/search?query=test') - assert response.status_code == 200 - data = json.loads(response.data) - assert isinstance(data, list) +# assert response.status_code == 200 +# data = json.loads(response.data) +# assert isinstance(data, list) -def test_post_search_users(app: Flask) -> None: - client = app.test_client() +# def test_post_search_users(app: Flask) -> None: +# client = app.test_client() - data = { - "query": "test", - "page": 1, - "count": 20, - "sort_type": "most_recent" - } +# data = { +# "query": "test", +# "page": 1, +# "count": 20, +# "sort_type": "most_recent" +# } - response = client.post('/users/search', json=data) +# response = client.post('/users/search', json=data) - assert response.status_code == 200 - data = json.loads(response.data) - assert isinstance(data, list) +# assert response.status_code == 200 +# data = json.loads(response.data) +# assert isinstance(data, list) -def test_get_project(app: Flask) -> None: - client = app.test_client() +# def test_get_project(app: Flask) -> None: +# client = app.test_client() - response = client.get('/projects/node-openerp') +# response = client.get('/projects/node-openerp') - assert response.status_code == 200 - data = json.loads(response.data) - assert isinstance(data, dict) +# assert response.status_code == 200 +# data = json.loads(response.data) +# assert isinstance(data, dict) -def test_get_all_projects(app: Flask) -> None: - client = app.test_client() +# def test_get_all_projects(app: Flask) -> None: +# client = app.test_client() - response = client.get('/projects') +# response = client.get('/projects') - assert response.status_code == 200 - data = json.loads(response.data) - assert isinstance(data, list) +# assert response.status_code == 200 +# data = json.loads(response.data) +# assert isinstance(data, list) -def test_search_projects(app: Flask) -> None: - client = app.test_client() +# def test_search_projects(app: Flask) -> None: +# client = app.test_client() - response = client.get('/projects/search?query=test') +# response = client.get('/projects/search?query=test') - assert response.status_code == 200 - data = json.loads(response.data) - assert isinstance(data, list) +# assert response.status_code == 200 +# data = json.loads(response.data) +# assert isinstance(data, list) -def test_post_search_projects(app: Flask) -> None: - client = app.test_client() +# def test_post_search_projects(app: Flask) -> None: +# client = app.test_client() - data = { - "query": "test", - "page": 1, - "count": 20, - "languages": ["python", "java"], - "sort_type": "most_recent" - } +# data = { +# "query": "test", +# "page": 1, +# "count": 20, +# "languages": ["python", "java"], +# "sort_type": "most_recent" +# } - response = client.post('/projects/search', json=data) +# response = client.post('/projects/search', json=data) - assert response.status_code == 200 - data = json.loads(response.data) - assert isinstance(data, list) +# assert response.status_code == 200 +# data = json.loads(response.data) +# assert isinstance(data, list) -def test_get_languages(app: Flask) -> None: - client = app.test_client() +# def test_get_languages(app: Flask) -> None: +# client = app.test_client() - response = client.get('/languages') +# response = client.get('/languages') - assert response.status_code == 200 - data = json.loads(response.data) - assert isinstance(data, list) +# assert response.status_code == 200 +# data = json.loads(response.data) +# assert isinstance(data, list) From 99d24d8d2b9be40e7a1bd362db3ac038a933ac50 Mon Sep 17 00:00:00 2001 From: sanix-darker Date: Sun, 20 Aug 2023 16:45:14 +0200 Subject: [PATCH 14/15] feat: working call for /languages --- api/app/main/__init__.py | 1 - api/app/main/utils/database/search_users.py | 24 +++++++++++++++++---- 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/api/app/main/__init__.py b/api/app/main/__init__.py index e6415fdf..af373ec3 100755 --- a/api/app/main/__init__.py +++ b/api/app/main/__init__.py @@ -19,7 +19,6 @@ def create_app(): app.include_router(github_router) # Add middleware/event_handler and everything else - app.get("/")(status) return app diff --git a/api/app/main/utils/database/search_users.py b/api/app/main/utils/database/search_users.py index f8e9cbd8..59b7a5d2 100644 --- a/api/app/main/utils/database/search_users.py +++ b/api/app/main/utils/database/search_users.py @@ -16,7 +16,11 @@ async def get_user(username: str) -> dict: if not ret or len(ret) < 1: return {"code": 400, "reason": "nothing found"} - return ret + return { + "code": 200, + "status": "success", + "result": ret, + } async def get_users(count: int) -> dict: conn = await get_connection() @@ -30,7 +34,11 @@ async def get_users(count: int) -> dict: if not ret or len(ret) < 1: return {"code": 400, "reason": "nothing found"} - return ret + return { + "code": 200, + "status": "success", + "result": ret, + } async def get_search_users(query: str, count: int = 20, page: int = 1): @@ -48,7 +56,11 @@ async def get_search_users(query: str, count: int = 20, page: int = 1): if not ret or len(ret) < 1: return {"code": 400, "reason": "nothing found"} - return ret + return { + "code": 200, + "status": "success", + "result": ret, + } async def post_search_users( @@ -85,4 +97,8 @@ async def post_search_users( if not ret or len(ret) < 1: return {"code": 400, "reason": "nothing found"} - return ret + return { + "code": 200, + "status": "success", + "result": ret, + } From 90f82e6591ca5be4f5926b2d599dadc582851c2d Mon Sep 17 00:00:00 2001 From: sanix-darker Date: Sun, 20 Aug 2023 17:22:24 +0200 Subject: [PATCH 15/15] feat: backend can now communicate with the database on standalone --- .../main/utils/database/search_projects.py | 2 -- api/app/settings.py | 27 ++++++++++--------- docker-compose.yml | 12 ++++----- 3 files changed, 20 insertions(+), 21 deletions(-) diff --git a/api/app/main/utils/database/search_projects.py b/api/app/main/utils/database/search_projects.py index 76bcc379..187d33ff 100644 --- a/api/app/main/utils/database/search_projects.py +++ b/api/app/main/utils/database/search_projects.py @@ -7,8 +7,6 @@ async def get_search_projects(query: str, count: int = 20, page: int = 1): offset = (page - 1) * count conn = await get_connection() - assert conn is not None - try: ret = conn.fetch( 'SELECT * FROM projects WHERE name LIKE $1 LIMIT $2 OFFSET $3', diff --git a/api/app/settings.py b/api/app/settings.py index 9fde57a0..067bdc31 100644 --- a/api/app/settings.py +++ b/api/app/settings.py @@ -1,6 +1,7 @@ # settings.py # All settings/parameter for the application import os +from functools import lru_cache from typing import Any import asyncpg @@ -16,18 +17,18 @@ def get_conf(key: str, fallback: Any = "") -> str: return os.environ.get(key, default=fallback) +@lru_cache(maxsize=1) async def get_connection(): - global __PG_CONNECTION + user=get_conf('POSTGRES_USER', 'user') + password=get_conf('POSTGRES_PASSWORD', 'pass') + database=get_conf('POSTGRES_DB', 'ossdb') + host=get_conf('DB_HOST', 'localhost') + port=get_conf('DB_PORT', 5432) - if __PG_CONNECTION is None: - __PG_CONNECTION = await asyncpg.connect( - user=get_conf('POSTGRES_USER', 'user'), - password=get_conf('POSTGRES_PASSWORD', 'pwd'), - database=get_conf('POSTGRES_DB', 'ossdb'), - host=get_conf('DB_HOST', 'localhost'), - port=get_conf('DB_PORT', 5432) - ) - try: - yield __PG_CONNECTION - finally: - await __PG_CONNECTION.close() #type:ignore + return await asyncpg.connect( + user= user, + password= password, + database= database, + host= host, + port= port, + ) diff --git a/docker-compose.yml b/docker-compose.yml index a1485677..fe322a74 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -21,12 +21,12 @@ services: env_file: ./api/.env volumes: - ./scripts/initdb:/docker-entrypoint-initdb.d - healthcheck: - test: ["CMD", "psql", "postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@localhost/${POSTGRES_DB}"] - interval: 0.5s - timeout: 1s - retries: 4 - start_period: 1s + # healthcheck: + # test: ["CMD", "psql", "postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@localhost/${POSTGRES_DB}"] + # interval: 0.5s + # timeout: 1s + # retries: 4 + # start_period: 1s frontend: build: ./frontend