diff --git a/.dockerignore b/.dockerignore
index 5ffc94d..78c4c28 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -5,4 +5,5 @@ docker-compose.yml
 .env
 .dockerignore
 data/
-.devcontainer/
\ No newline at end of file
+.devcontainer/
+.github/
\ No newline at end of file
diff --git a/.env.sample b/.env.sample
new file mode 100644
index 0000000..744fd95
--- /dev/null
+++ b/.env.sample
@@ -0,0 +1,46 @@
+# Django settings
+SECRET_KEY=
+DEBUG=True
+DJANGO_ALLOWED_HOSTS=localhost 127.0.0.1 [::1]
+
+# MySQL/MariaDB settings
+MYSQL_ROOT_USER=root
+MYSQL_ROOT_PASSWORD=root
+MYSQL_DATABASE=atlas
+MYSQL_USER=atlas
+MYSQL_PASSWORD=atlas
+MYSQL_PORT=3306
+MYSQL_TEST_DATABASE=atlas_test
+MYSQL_TEST_PORT=3306
+MYSQL_TEST_USER=root
+MYSQL_TEST_PASSWORD=root
+
+# DJANGO Settings for testing with SQLite
+DJANGO_DB_ENGINE=django.db.backends.sqlite3
+DJANGO_MYSQL_DBNAME=memory
+TESTING=True
+
+WSGI_PREFIX=/default
+WSGI_PORT=8087
+DJANGO_SECRET_KEY=test
+DJANGO_MYSQL_DBUSER=${MYSQL_TEST_USER}
+DJANGO_MYSQL_DBPASS=${MYSQL_TEST_PASSWORD}
+DJANGO_MYSQL_DBHOST=localhost
+DJANGO_MYSQL_DBPORT=${MYSQL_TEST_PORT}
+DJANGO_MYSQL_TEST_DBNAME=${MYSQL_TEST_DATABASE}
+DJANGO_MYSQL_TEST_DBPORT=${MYSQL_TEST_PORT}
+DJANGO_MYSQL_TEST_DBUSER=${MYSQL_TEST_USER}
+DJANGO_MYSQL_TEST_DBPASS=${MYSQL_TEST_PASSWORD}
+DJANGO_TNS_DAEMON_SERVER=
+DJANGO_TNS_DAEMON_PORT=1010
+DJANGO_MPC_DAEMON_SERVER=
+DJANGO_MPC_DAEMON_PORT=1011
+DJANGO_LASAIR_TOKEN=
+DJANGO_LOG_LEVEL=INFO
+DJANGO_NAMESERVER_TOKEN=
+DJANGO_NAMESERVER_API_URL=
+DJANGO_NAMESERVER_MULTIPLIER=
+DJANGO_PANSTARRS_TOKEN=
+DJANGO_PANSTARRS_BASE_URL=
+DJANGO_DUSTMAP_LOCATION=
+API_TOKEN_EXPIRY=10
\ No newline at end of file
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
new file mode 100644
index 0000000..840968f
--- /dev/null
+++ b/.github/workflows/ci.yaml
@@ -0,0 +1,71 @@
+name: Continuous Integration
+on: [push, pull_request]
+jobs:
+  linting:
+    runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.9", "3.10"]
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          python -m pip install flake8
+      - name: Lint with flake8
+        run: |
+          # stop the build if there are Python syntax errors or undefined names
+          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
+          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
+          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
+  testing-docker:
+    runs-on: ubuntu-latest
+    timeout-minutes: 15
+
+    steps:
+      - uses: actions/checkout@v2
+      - name: Build test containers
+        run: |
+          # Copy the sample environment file and build the containers
+          cp .env.sample .env
+          # In the future we can populate the .env file with secrets, none needed for now
+          docker compose build
+      - name: Start db container
+        run: docker compose up -d db
+      - name: Start test container
+        run: |
+          docker compose up tests --exit-code-from tests
+      - name: Stop containers
+        if: always()
+        run: docker compose down
+  testing-pip:
+    runs-on: ubuntu-latest
+    timeout-minutes: 15
+
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: '3.9'
+      - name: Install system dependencies
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y apache2 apache2-dev
+      - name: Install python dependencies
+        run: |
+          python -m pip install --upgrade pip
+          python -m pip install .
+      - name: Set initial environment
+        run: |
+          # Copy the sample environment file
+          cp .env.sample .env
+      - name: Run tests
+        run: |
+          python psat_server_web/atlas/manage.py makemigrations --noinput
+          python psat_server_web/atlas/manage.py test --noinput
diff --git a/.gitignore b/.gitignore
index 36a1778..464814b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -53,3 +53,6 @@ recurrence_plots
 reports
 admin/
 django_tables2/
+
+# Log files
+*.log
diff --git a/Dockerfile b/Dockerfile
index e3623a3..1a855d4 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -14,6 +14,7 @@ WORKDIR /app
 RUN apt-get update && apt-get install -y \
     build-essential \
     pkg-config \
+    apache2 \
    apache2-dev \
     libhdf5-dev \
     default-mysql-client \
@@ -26,8 +27,8 @@ RUN apt-get update && apt-get install -y \
 COPY . .
 
 # Install python dependencies and the package itself
-RUN pip install --no-cache-dir -e . && \
-    pip install --no-cache-dir mod_wsgi-standalone
+RUN pip install --no-cache-dir -e .
+    # pip install --no-cache-dir mod_wsgi-express
 
 WORKDIR /app/psat_server_web/atlas
diff --git a/README.md b/README.md
index 0f62b97..3c96e11 100644
--- a/README.md
+++ b/README.md
@@ -11,3 +11,91 @@ site-packages/psat\_server\_web/atlas/media/images
 site-packages/psat\_server\_web/ps1/media/images
 
 directories which point to the location of the image stamps.
+
+---
+
+# Local development version
+
+A localised development instance can be run with docker compose. You will need a
+`.env` file in your repository root to define some environment variables,
+looking something like:
+
+``` .env
+# Django settings
+SECRET_KEY={YOUR_SECRET_KEY_HERE}
+DJANGO_ALLOWED_HOSTS=localhost 127.0.0.1 [::1]
+
+# MySQL/MariaDB settings
+MYSQL_ROOT_USER=root
+MYSQL_ROOT_PASSWORD=root
+MYSQL_DATABASE=atlas
+MYSQL_USER=atlas
+MYSQL_PASSWORD=atlas
+MYSQL_PORT=3306
+MYSQL_TEST_DATABASE=atlas_test
+MYSQL_TEST_PORT=3306
+MYSQL_TEST_USER=root
+MYSQL_TEST_PASSWORD=root
+
+DJANGO_MYSQL_DBUSER=atlas
+DJANGO_LASAIR_TOKEN=
+
+```
+
+Several of the values are omitted but aren't necessary to run a development
+environment. Also, such simple passwords should not be used in production, but this should get you something running locally. Hopefully it's then as simple as running the following:
+
+``` bash
+docker compose up
+```
+
+This will start 4 services:
+- `db` - A MariaDB instance. By default served at `localhost:3306`
+- `atlas-web` - A mod-wsgi instance to serve the django front end. By default
+served at `localhost:8086` and requires `db` to be running.
+- `adminer` - A web interface for interacting with the db. By default served
+at `localhost:8080` and requires `db` to be running.
+- `tests` - A single-use run of the unit tests using django's `manage.py test`.
+Requires `db` to be running.
+
+Each of these services can be run independently with
+
+``` bash
+docker compose up {service_name}
+```
+
+So if, for example, you wanted to run the tests you could execute
+`docker compose up tests` and it would – after starting `db` if it is not
+already running – perform a single run of the unit tests.
+
+## A dummy database
+
+There are a few things you will likely need to do to get a fully working,
+useful version of the web server running locally, namely:
+1. Get a dummy database dump .sql file from one of the developers and place it into `data/init.sql`
+2. [Optional] Get an image dump and place it into `data/db_data/`
+3. Fix a mild issue with the database model (see next section)
+
+If you are having problems with any of this, contact one of the developers.
+
+### Issue with `TcsGravityEventAnnotations.map_iteration`
+
+If you are getting an error along the lines of
+
+```
+atlas-web-1 | ERRORS:
+atlas-web-1 | atlas.TcsGravityEventAnnotations.map_iteration: (fields.E311) 'TcsGravityAlerts.map_iteration' must be unique because it is referenced by a foreign key.
+atlas-web-1 | HINT: Add unique=True to this field or add a UniqueConstraint (without condition) in the model Meta.constraints.
+```
+
+then you may need to make an adjustment to the models: uncommenting the line
+declaring `TcsGravityAlerts.map_iteration` with the `unique` kwarg, and
+commenting out the line without it, should solve the problem. Specifically,
+the lines should look like
+```
+# map_iteration = models.CharField(max_length=100, blank=True, null=True)
+map_iteration = models.CharField(max_length=100, blank=True, null=True, unique=True)
+```
+This is within the `TcsGravityAlerts(models.Model)` class. Again, please speak
+to one of the developers if this is unclear.
+
diff --git a/docker-compose.yml b/docker-compose.yml
index 675d5b8..1d58284 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -17,8 +17,8 @@ services:
       MYSQL_PASSWORD: ${MYSQL_PASSWORD}
       MARIADB_DATABASE: ${MYSQL_DATABASE}
     healthcheck:
-      test: ["CMD", "mysql", "-h", "localhost", "-u", "root", "-p${MYSQL_ROOT_PASSWORD}", "-e", "SELECT 1"]
-      timeout: 20s
+      test: healthcheck.sh --su-mysql --connect --innodb_initialized
+      timeout: 5s
       retries: 10
 
   adminer:
@@ -73,4 +73,60 @@ services:
       - DJANGO_NAMESERVER_API_URL=''
       - DJANGO_LASAIR_TOKEN=${DJANGO_LASAIR_TOKEN}
       - DJANGO_DUSTMAP_LOCATION=/tmp/dustmap
+      - API_TOKEN_EXPIRY=10
+      - DJANGO_LOG_LEVEL=DEBUG
+      - DJANGO_DEBUG=True
+      - DJANGO_PANSTARRS_TOKEN=${PANSTARRS_TOKEN}
+      - DJANGO_PANSTARRS_BASE_URL=${PANSTARRS_BASE_URL}
 
+  tests:
+    build: .
+    image: local/psat-server-web
+    # Run the tests, using the root user to avoid permission issues
+    command: >
+      bash -c "python manage.py makemigrations --noinput &&
+      python manage.py test
+      || exit $?"
+    volumes:
+      # Mount the code directories into the image to allow for live code changes
+      # while we develop
+      - ./data/db_data:/images
+      - ./psat_server_web/atlas/atlasapi:/app/psat_server_web/atlas/atlasapi
+      - ./psat_server_web/atlas/atlas:/app/psat_server_web/atlas/atlas
+      - ./psat_server_web/atlas/accounts:/app/psat_server_web/atlas/accounts
+      - ./psat_server_web/atlas/tests:/app/psat_server_web/atlas/tests
+    ports:
+      - 8087:8087
+    depends_on:
+      db:
+        condition: service_healthy
+        restart: true
+    environment:
+      - WSGI_PREFIX=/atlas
+      - WSGI_PORT=8087
+      - DJANGO_DB_ENGINE=django.db.backends.mysql
+      - DJANGO_MYSQL_DBNAME=${MYSQL_TEST_DATABASE}
+      - DJANGO_MYSQL_DBUSER=${MYSQL_TEST_USER}
+      - DJANGO_MYSQL_DBPASS=${MYSQL_TEST_PASSWORD}
+      - DJANGO_MYSQL_DBHOST=db
+      - DJANGO_MYSQL_DBPORT=${MYSQL_TEST_PORT}
+      - DJANGO_MYSQL_TEST_DBNAME=${MYSQL_TEST_DATABASE}
+      - DJANGO_MYSQL_TEST_DBPORT=${MYSQL_TEST_PORT}
+      - DJANGO_MYSQL_TEST_DBUSER=${MYSQL_TEST_USER}
+      - DJANGO_MYSQL_TEST_DBPASS=${MYSQL_TEST_PASSWORD}
+      - DJANGO_SECRET_KEY=secret
+      - DJANGO_TNS_DAEMON_SERVER=psat-server-web
+      - DJANGO_TNS_DAEMON_PORT=8001
+      - DJANGO_MPC_DAEMON_SERVER=psat-server-web
+      - DJANGO_MPC_DAEMON_PORT=8002
+      - DJANGO_NAME_DEAMON_SERVER=psat-server-web
+      - DJANGO_NAME_DEAMON_PORT=8003
+      - DJANGO_NAMESERVER_MULTIPLIER=10000000
+      - DJANGO_NAMESERVER_TOKEN=''
+      - DJANGO_NAMESERVER_API_URL=''
+      - DJANGO_LASAIR_TOKEN=${DJANGO_LASAIR_TOKEN}
+      - DJANGO_DUSTMAP_LOCATION=/tmp/dustmap
+      - DJANGO_LOG_LEVEL=DEBUG
+      - API_TOKEN_EXPIRY=10
+      - DJANGO_PANSTARRS_TOKEN=${PANSTARRS_TOKEN}
+      - DJANGO_PANSTARRS_BASE_URL=${PANSTARRS_BASE_URL}
\ No newline at end of file
diff --git a/psat_server_web/atlas/accounts/admin.py b/psat_server_web/atlas/accounts/admin.py
index ff8e8bc..39e7e1d 100644
--- a/psat_server_web/atlas/accounts/admin.py
+++ b/psat_server_web/atlas/accounts/admin.py
@@ -2,14 +2,30 @@
 Customise admin interface for Group model to include GroupProfile, so default
 expiry time for tokens can be set.
 """
+import logging
+
 from django.contrib import admin
 from django.contrib.auth.models import Group
+from django.forms.models import ModelForm
 
 from .models import GroupProfile
+
+
+logger = logging.getLogger(__name__)
+
+class AlwaysChangedModelForm(ModelForm):
+    def has_changed(self):
+        """ Should return True if data differs from initial.
+        By always returning True, even unchanged inlines will get validated and saved.
+        We need this because the GroupProfile needs to be created even if the default
+        values haven't been changed.
+        """
+        return True
+
 class GroupProfileInline(admin.StackedInline):
     model = GroupProfile
     can_delete = False
+    extra = 1
+    form = AlwaysChangedModelForm
 
 class GroupAdmin(admin.ModelAdmin):
     inlines = (GroupProfileInline,)
diff --git a/psat_server_web/atlas/accounts/signals.py b/psat_server_web/atlas/accounts/signals.py
index d22d0b6..297571f 100644
--- a/psat_server_web/atlas/accounts/signals.py
+++ b/psat_server_web/atlas/accounts/signals.py
@@ -1,15 +1,27 @@
-from django.db.models.signals import post_save
+import logging
+
+from django.db.models import signals
 from django.dispatch import receiver
 from django.contrib.auth.models import Group
-from .models import GroupProfile
 
-@receiver(post_save, sender=Group)
+logger = logging.getLogger(__name__)
+
+@receiver(signals.post_save, sender=Group)
 def create_group_profile(sender, instance, created, **kwargs):
+    # NOTE (2024-11-12 JL): This function is no longer needed as we have instead
+    # solved the problem of duplicate GroupProfile creation by using the
+    # AlwaysChangedModelForm in the admin interface, but I'm leaving this here
+    # as a reference for future use of signals and logging.
+    logger.debug('create_group_profile called')
+    logger.debug('instance: %s', instance)
+    logger.debug('created: %s', created)
+
+    # Disconnect the signal so we don't get into a loop
+    signals.post_save.disconnect(create_group_profile, sender=Group)
+
     if created:
-        new_profile, profile_created = GroupProfile.objects.get_or_create(
-            group=instance
-        )
-        if profile_created:
-            # If we created a new profile then set the group's profile to it
-            instance.profile = new_profile
-            instance.save()
\ No newline at end of file
+        logger.debug('In created block')
+
+    # Reconnect the signal once we're done
+    signals.post_save.connect(create_group_profile, sender=Group)
+    logger.debug('create_group_profile finished')
\ No newline at end of file
diff --git a/psat_server_web/atlas/atlas/settings.py b/psat_server_web/atlas/atlas/settings.py
index 630aace..b922937 100644
--- a/psat_server_web/atlas/atlas/settings.py
+++ b/psat_server_web/atlas/atlas/settings.py
@@ -25,9 +25,9 @@
 SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY')
 
 # SECURITY WARNING: don't run with debug turned on in production!
-DEBUG = False
+DEBUG = os.environ.get('DJANGO_DEBUG', 'False') == 'True'
 
-TEMPLATE_DEBUG = False
+TEMPLATE_DEBUG = os.environ.get('DJANGO_DEBUG', 'False') == 'True'
 
 # 2021-08-21 KWS Need to set this to None, otherwise default is 1000.
 DATA_UPLOAD_MAX_NUMBER_FIELDS = None
@@ -57,14 +57,17 @@
 DATABASES = {
     'default': {
-        'ENGINE': 'django.db.backends.mysql',
+        'ENGINE': os.environ.get('DJANGO_DB_ENGINE', 'django.db.backends.mysql'),
         'NAME': os.environ.get('DJANGO_MYSQL_DBNAME'),
         'USER': os.environ.get('DJANGO_MYSQL_DBUSER'),
         'PASSWORD': os.environ.get('DJANGO_MYSQL_DBPASS'),
         'HOST': os.environ.get('DJANGO_MYSQL_DBHOST'),
         'PORT': int(os.environ.get('DJANGO_MYSQL_DBPORT')),
         'TEST': {
-            'NAME': os.environ.get('DJANGO_MYSQL_TESTDBNAME'),
+            'NAME': os.environ.get('DJANGO_MYSQL_TEST_DBNAME'),
+            'PORT': os.environ.get('DJANGO_MYSQL_TEST_DBPORT'),
+            'USER': os.environ.get('DJANGO_MYSQL_TEST_DBUSER'),
+            'PASSWORD': os.environ.get('DJANGO_MYSQL_TEST_DBPASS'),
         }
     }
 }
 
@@ -84,6 +87,49 @@
 }
 
 
+DJANGO_LOG_LEVEL = os.environ.get('DJANGO_LOG_LEVEL', 'WARNING')
+LOGGING = {
+    "version": 1,  # the dictConfig format version
+    "disable_existing_loggers": False,  # retain the default loggers
+    "formatters": {
+        "verbose": {
+            "format": "{name} {levelname} {asctime} {module} {message}",
+            "style": "{",
+        },
+        "simple": {
+            "format": "{levelname} {message}",
+            "style": "{",
+        },
+    },
+
+    "handlers": {
+        # The different log handlers are defined here. We have a file logger and
+        # a stderr logger - which reroutes to the error_log file managed by wsgi.
+        "file": {
+            "class": "logging.FileHandler",
+            "filename": "django-default.log",
+            "level": DJANGO_LOG_LEVEL,
+            "formatter": "verbose",
+        },
+        "stderr": {
+            "class": "logging.StreamHandler",
+            "level": DJANGO_LOG_LEVEL,
+            "formatter": "verbose",
+            "stream": "ext://sys.stderr",
+        }
+    },
+    # Implement the stderr logger as the default logger.
+    "loggers": {
+        "": {
+            "handlers": ["stderr"],
+            "level": DJANGO_LOG_LEVEL,
+            "propagate": True,
+        },
+    }
+
+}
+
+
 LANGUAGE_CODE = 'en-gb'
 
 TIME_ZONE = 'UTC'
diff --git a/psat_server_web/atlas/tests/atlasapi/__init__.py b/psat_server_web/atlas/tests/atlasapi/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/psat_server_web/atlas/tests/atlasapi/test_authentication.py b/psat_server_web/atlas/tests/atlasapi/test_authentication.py
new file mode 100644
index 0000000..e90d25d
--- /dev/null
+++ b/psat_server_web/atlas/tests/atlasapi/test_authentication.py
@@ -0,0 +1,111 @@
+# tests/test_authentication.py
+from django.test import TestCase
+from django.contrib.auth.models import User, Group
+from django.utils.timezone import now, timedelta
+from django.conf import settings
+from rest_framework.authtoken.models import Token
+from rest_framework.exceptions import AuthenticationFailed
+
+from atlasapi.authentication import ExpiringTokenAuthentication
+from accounts.models import GroupProfile
+
+
+class TokenAuthenticationTests(TestCase):
+
+    def setUp(self):
+        # Create groups
+        self.week_group = Group.objects.create(name="Weekly Expiring")
+        self.year_group = Group.objects.create(name="Yearly Expiring")
+        self.never_expiring_group = Group.objects.create(name="Never Expiring")
+        self.no_profile_group = Group.objects.create(name="No Profile")
+
+        # Add expiration times to group profiles
+        # No profile for self.no_profile_group
+        GroupProfile.objects.create(group=self.week_group, token_expiration_time=timedelta(weeks=1))
+        GroupProfile.objects.create(group=self.year_group, token_expiration_time=timedelta(days=365))
+        GroupProfile.objects.create(group=self.never_expiring_group, token_expiration_time=timedelta(days=10000))
+
+        # Create users and assign them to groups
+        self.no_group_user = User.objects.create_user(username="no_group_user", password="password")
+        self.no_profile_user = User.objects.create_user(username="no_profile_user", password="password")
+        self.week_user = User.objects.create_user(username="week_user", password="password")
+        self.year_user = User.objects.create_user(username="year_user", password="password")
+        self.never_expiring_user = User.objects.create_user(username="never_expiring_user", password="password")
+
+        # Assign users to groups
+        # No group for self.no_group_user
+        self.no_profile_user.groups.add(self.no_profile_group)
+        self.week_user.groups.add(self.week_group)
+        self.year_user.groups.add(self.year_group)
+        self.never_expiring_user.groups.add(self.never_expiring_group)
+
+        # Create tokens for each user
+        self.no_group_token = Token.objects.create(user=self.no_group_user)
+        self.no_profile_token = Token.objects.create(user=self.no_profile_user)
+        self.week_token = Token.objects.create(user=self.week_user)
+        self.year_token = Token.objects.create(user=self.year_user)
+        self.never_expiring_token = Token.objects.create(user=self.never_expiring_user)
+
+        # Setup token authentication
+        self.auth = ExpiringTokenAuthentication()
+
+    def test_no_group(self):
+        # Test: user with no group should have the default token expiration
+        self.assertEqual(self.auth.authenticate_credentials(self.no_group_token.key),
+                         (self.no_group_user, self.no_group_token))
+
+        self.no_group_token.created = now() - timedelta(days=settings.TOKEN_EXPIRY + 1)
+        self.no_group_token.save()
+        with self.assertRaises(AuthenticationFailed) as cm:
+            self.auth.authenticate_credentials(self.no_group_token.key)
+        self.assertEqual(str(cm.exception), 'Token has expired.')
+
+    def test_no_profile(self):
+        # Test: user whose group has no profile should fail authentication
+        with self.assertRaises(AuthenticationFailed) as cm:
+            self.auth.authenticate_credentials(self.no_profile_token.key)
+        self.assertEqual(str(cm.exception), 'Could not authenticate: Group has no profile. Please contact administrator.')
+
+    def test_weekly_token_expiration(self):
+        # Test: weekly expiring token should be valid if within one week
+        self.assertEqual(
+            self.auth.authenticate_credentials(self.week_token.key),
+            (self.week_user, self.week_token)
+        )
+
+        # Test: weekly token should expire after one week
+        self.week_token.created = now() - timedelta(weeks=2)
+        self.week_token.save()
+        with self.assertRaises(AuthenticationFailed) as cm:
+            self.auth.authenticate_credentials(self.week_token.key)
+        self.assertEqual(str(cm.exception), 'Token has expired.')
+
+    def test_yearly_token_expiration(self):
+        # Test: yearly expiring token should be valid if within one year
+        self.assertEqual(
+            self.auth.authenticate_credentials(self.year_token.key),
+            (self.year_user, self.year_token)
+        )
+
+        # Test: yearly token should expire after one year
+        self.year_token.created = now() - timedelta(days=400)
+        self.year_token.save()
+        with self.assertRaises(AuthenticationFailed) as cm:
+            self.auth.authenticate_credentials(self.year_token.key)
+        self.assertEqual(str(cm.exception), 'Token has expired.')
+
+    def test_never_expiring_token(self):
+        # Test: never-expiring token should always be valid
+        self.assertEqual(
+            self.auth.authenticate_credentials(self.never_expiring_token.key),
+            (self.never_expiring_user, self.never_expiring_token)
+        )
+
+        # Even if we artificially backdate the token, it should still be valid
+        self.never_expiring_token.created = now() - timedelta(days=9999)
+        self.never_expiring_token.save()
+        self.assertEqual(
+            self.auth.authenticate_credentials(self.never_expiring_token.key),
+            (self.never_expiring_user, self.never_expiring_token)
+        )
+
diff --git a/psat_server_web/atlas/tests/atlasapi/test_failure.py b/psat_server_web/atlas/tests/atlasapi/test_failure.py
new file mode 100644
index 0000000..8fcf166
--- /dev/null
+++ b/psat_server_web/atlas/tests/atlasapi/test_failure.py
@@ -0,0 +1,8 @@
+import unittest
+
+from django.test import TestCase
+
+class TestFailure(TestCase):
+    @unittest.skip("Skipping this test")
+    def test_failure(self):
+        assert False
\ No newline at end of file
diff --git a/psat_server_web/atlas/tests/test_permissions.py b/psat_server_web/atlas/tests/atlasapi/test_permissions.py
similarity index 72%
rename from psat_server_web/atlas/tests/test_permissions.py
rename to psat_server_web/atlas/tests/atlasapi/test_permissions.py
index 6546c1d..57f8d4c 100644
--- a/psat_server_web/atlas/tests/test_permissions.py
+++ b/psat_server_web/atlas/tests/atlasapi/test_permissions.py
@@ -15,14 +15,16 @@ def setUp(self):
 
         self.client.credentials(HTTP_AUTHORIZATION='Token ' + self.token.key)
 
     def test_permissions(self):
-        response = self.client.get(reverse('api:objects'))
+        endpoint = "/api/vrascores/"
+        response = self.client.get(endpoint)
         self.assertEqual(response.status_code, status.HTTP_200_OK)
 
-        response = self.client.post(reverse('api:objects'))
+        response = self.client.post(endpoint)
         self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
 
         self.user.is_staff = True
         self.user.save()
-        response = self.client.post(reverse('api:objects'))
-        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
\ No newline at end of file
+        response = self.client.post(endpoint)
+        # This will now fail with a 400 because we've not provided the payload
+        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
\ No newline at end of file
diff --git a/psat_server_web/schema/tcs_gravity_alerts.sql b/psat_server_web/schema/tcs_gravity_alerts.sql
index 77aac34..3ff3f4a 100644
--- a/psat_server_web/schema/tcs_gravity_alerts.sql
+++ b/psat_server_web/schema/tcs_gravity_alerts.sql
@@ -1,6 +1,6 @@
 -- 2023-06-15 KWS Added an "interesting" column - human override of alert significance
 
-DROP TABLE `tcs_gravity_alerts`;
+DROP TABLE IF EXISTS `tcs_gravity_alerts`;
 
 CREATE TABLE `tcs_gravity_alerts` (
 `id` int NOT NULL AUTO_INCREMENT,
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..cb707d8
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,3 @@
+[flake8]
+exclude = .tox,.git,psat_server_web/atlas/atlas/commonqueries.py,psat_server_web/ps1/
+max-complexity = 10
diff --git a/setup.py b/setup.py
index 0e55e9d..bcdfcf7 100755
--- a/setup.py
+++ b/setup.py
@@ -2,6 +2,9 @@
 import os
 
 moduleDirectory = os.path.dirname(os.path.realpath(__file__))
+
+# Imports __version__ variable from __version__.py
+__version__ = ''
 exec(open(moduleDirectory + "/psat_server_web/__version__.py").read())
 
 def readme():