differentiate application path and name #22

Open · wants to merge 17 commits into base: develop
2 changes: 2 additions & 0 deletions .gitignore
@@ -10,3 +10,5 @@ dist
**.apihub
**/.DS_Store
.secrets
prod.env
prod
26 changes: 26 additions & 0 deletions 9be9edae04c5_add_owner_and_created_at_to_application_.py
@@ -0,0 +1,26 @@
"""add owner and created_at to application table

Revision ID: 9be9edae04c5
Revises:
Create Date: 2023-01-12 15:01:46.535112

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '9be9edae04c5'
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.add_column('application', sa.Column("created_at", sa.DateTime, nullable=False))
    op.add_column('application', sa.Column("owner", sa.ForeignKey('user.username'), nullable=False))


def downgrade() -> None:
    op.drop_column('application', "owner")
    op.drop_column('application', "created_at")
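
Review note: as written, the owner column is given only a ForeignKey and no column type, and both columns are added with nullable=False, so this revision is likely to fail against an application table that already contains rows. A minimal sketch of a safer upgrade, assuming PostgreSQL and that a server-side default for created_at is acceptable (the backfill statement is purely illustrative; foreign-key creation is handled separately, as in the alembic/versions revision further down):

def upgrade() -> None:
    # server default so existing rows can satisfy NOT NULL
    op.add_column(
        'application',
        sa.Column("created_at", sa.DateTime, nullable=False, server_default=sa.func.now()),
    )
    # add owner with an explicit type, nullable at first; backfill, then tighten
    op.add_column('application', sa.Column("owner", sa.String(), nullable=True))
    # op.execute("UPDATE application SET owner = ...")  # hypothetical backfill
    op.alter_column('application', "owner", nullable=False)
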
105 changes: 105 additions & 0 deletions alembic.ini
@@ -0,0 +1,105 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = alembic

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url =


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
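
Note: sqlalchemy.url is deliberately left blank above; alembic/env.py (added below) fills it in at runtime from the DB_URI environment variable. A sketch of how migrations could then be run programmatically, where the connection string is only an example:

import os
from alembic.config import main as alembic_main

# DB_URI value is illustrative; env.py copies it into sqlalchemy.url
os.environ["DB_URI"] = "postgresql://apihub:secret@localhost:5432/apihub"
alembic_main(argv=["upgrade", "head"])  # equivalent to: alembic upgrade head
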
1 change: 1 addition & 0 deletions alembic/README
@@ -0,0 +1 @@
Generic single-database configuration.
81 changes: 81 additions & 0 deletions alembic/env.py
@@ -0,0 +1,81 @@
import os
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

config.set_main_option('sqlalchemy.url', os.environ.get('DB_URI').replace('%', '%%'))

# add your model's MetaData object here
# for 'autogenerate' support
import apihub.server
from apihub.common.db_session import Base
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
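
The .replace('%', '%%') above is needed because set_main_option pushes the value through Alembic's ConfigParser-style interpolation, in which a bare % is special (for example in URL-encoded passwords). A tiny sketch with made-up values:

# sketch: escaping '%' before handing the URL to the ini-style config (values are made up)
raw_uri = "postgresql://apihub:p%40ssword@localhost:5432/apihub"  # '%40' is an URL-encoded '@'
config.set_main_option('sqlalchemy.url', raw_uri.replace('%', '%%'))
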
24 changes: 24 additions & 0 deletions alembic/script.py.mako
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
27 changes: 27 additions & 0 deletions alembic/versions/5012a4422d71_add_owner_to_application_table.py
@@ -0,0 +1,27 @@
"""Add owner to application table

Revision ID: 5012a4422d71
Revises:
Create Date: 2023-01-15 10:55:32.594417

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '5012a4422d71'
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.add_column('application', sa.Column('created_at', sa.DateTime(), nullable=True))
    op.add_column('application', sa.Column('owner', sa.String(), nullable=True))
    op.create_foreign_key(None, 'application', 'users', ['owner'], ['username'])


def downgrade() -> None:
    op.drop_constraint(None, 'application', type_='foreignkey')
    op.drop_column('application', 'owner')
    op.drop_column('application', 'created_at')
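
Review note: this revision appears to overlap with the root-level 9be9edae04c5 migration above (both have down_revision = None and add the same columns), and the downgrade is unlikely to work as written because op.drop_constraint is given None where a constraint name is required. A sketch that names the foreign key explicitly, where fk_application_owner_users is a made-up name:

# sketch: an explicitly named FK so downgrade can drop it (name is hypothetical)
FK_NAME = "fk_application_owner_users"


def upgrade() -> None:
    op.add_column('application', sa.Column('created_at', sa.DateTime(), nullable=True))
    op.add_column('application', sa.Column('owner', sa.String(), nullable=True))
    op.create_foreign_key(FK_NAME, 'application', 'users', ['owner'], ['username'])


def downgrade() -> None:
    op.drop_constraint(FK_NAME, 'application', type_='foreignkey')
    op.drop_column('application', 'owner')
    op.drop_column('application', 'created_at')
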
111 changes: 111 additions & 0 deletions apihub/activity/middlewares.py
@@ -0,0 +1,111 @@
import json
from typing import Callable, Any
from fastapi import Request
from fastapi_jwt_auth import AuthJWT
from starlette.middleware.base import BaseHTTPMiddleware

from ..common.db_session import db_context
from ..security.schemas import SecurityToken
from .schemas import ActivityBase
from .models import Activity

class ActivityLogger(BaseHTTPMiddleware):
    def __init__(self, app):
        super().__init__(app)

    async def set_body(self, request: Request):
        receive_ = await request._receive()

        async def receive():
            return receive_

        request._receive = receive

    async def dispatch(self, request: Request, call_next):
        data = {
            "ip": request.client.host,
            "user_agent": request.headers.get("User-Agent"),
            "method": request.method,
            "path": request.url.path,
            "headers": dict(request.headers),
            "query_params": request.query_params,
        }

        is_recording = request.url.path.startswith("/async")

        if is_recording:
            await self.set_body(request)
            body = await request.body()
            if body:
                data["request_body"] = body

        # get authorization from request
        authorization = request.headers.get('Authorization')
        if authorization:
            auth = AuthJWT(req=request)
            token = SecurityToken.from_token(auth)
            data["user_id"] = token.user_id

        # call next middleware
        response = await call_next(request)

        if is_recording:
            # extract response body (note: the streaming response returned by
            # call_next has no .json() method, so this currently falls through
            # to the except clause and no body is recorded)
            data["response_status_code"] = response.status_code
            try:
                data["response_body"] = json.dumps(await response.json())
            except Exception:
                pass

        try:
            with db_context() as session:
                activity = ActivityBase(**data)
                session.add(Activity(**activity.dict()))
        except Exception:
            pass

        return response


async def log_activity(request: Request, call_next: Callable, session: Any):
    data = {
        "ip": request.client.host,
        "user_agent": request.headers.get("User-Agent"),
        "method": request.method,
        "path": request.url.path,
        "headers": dict(request.headers),
        "query_params": request.query_params,
    }

    is_recording = request.url.path.startswith("/async")

    if is_recording:
        try:
            data["request_body"] = json.dumps(await request.json())
        except Exception:
            pass

    # get authorization from request
    authorization = request.headers.get('Authorization')
    if authorization:
        auth = AuthJWT(req=request)
        token = SecurityToken.from_token(auth)
        data["user_id"] = token.user_id

    # call next middleware
    response = await call_next(request)

    if is_recording:
        # extract response body (same caveat as above: the response object has
        # no .json() method, so nothing is recorded here)
        data["response_status_code"] = response.status_code
        try:
            data["response_body"] = json.dumps(await response.json())
        except Exception:
            pass

    # store activity (the `session` argument is shadowed by the context manager)
    with db_context() as session:
        activity = ActivityBase(**data)
        session.add(Activity(**activity.dict()))

    return response
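
For reference, a minimal sketch of how the middleware might be attached to the application; the FastAPI instance name and the registration site are assumptions, since the corresponding change to apihub/server.py is not part of this diff:

# sketch: wiring the middleware into the app (assumes a FastAPI instance named `app`)
from fastapi import FastAPI
from apihub.activity.middlewares import ActivityLogger

app = FastAPI()
app.add_middleware(ActivityLogger)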