
Commit

Merge pull request #1272 from SEED-platform/v2.0.1
Merge 2.0.1 release branch
mmclark authored Mar 3, 2017
2 parents 2ceea20 + 88d1f46 commit a2f275f
Showing 94 changed files with 4,511 additions and 2,797 deletions.
3 changes: 2 additions & 1 deletion .gitignore
@@ -48,7 +48,8 @@ dump.rdb
log
*.log
.tox

Vagrantfile
.vagrant*
# Ignore tmp test data
seed/data_importer/tests/data/tmp_*
seed/data_importer/tests/data/~*
10 changes: 6 additions & 4 deletions api/urls.py
@@ -8,24 +8,27 @@
from rest_framework import routers

from api.views import test_view_with_arg, TestReverseViewSet
from seed.data_importer.views import ImportFileViewSet
from seed.data_importer.views import (
handle_s3_upload_complete,
get_upload_details,
sign_policy_document,
LocalUploaderViewSet
)
from seed.views.api import get_api_schema
from seed.views.columns import ColumnViewSet, ColumnMappingViewSet
from seed.views.cycles import CycleView
from seed.views.datasets import DatasetViewSet
from seed.views.import_files import ImportFileViewSet
from seed.views.labels import LabelViewSet, UpdateInventoryLabelsAPIView
from seed.views.main import DataFileViewSet, version, progress
from seed.views.organizations import OrganizationViewSet
from seed.views.projects import ProjectViewSet
from seed.views.users import UserViewSet
from seed.views.properties import PropertyViewSet, TaxLotViewSet
from seed.views.labels import LabelViewSet, UpdateInventoryLabelsAPIView
from seed.views.users import UserViewSet

api_v2_router = routers.DefaultRouter()
api_v2_router.register(r'columns', ColumnViewSet, base_name="columns")
api_v2_router.register(r'column_mappings', ColumnMappingViewSet, base_name="column_mappings")
api_v2_router.register(r'datasets', DatasetViewSet, base_name="datasets")
api_v2_router.register(r'organizations', OrganizationViewSet, base_name="organizations")
api_v2_router.register(r'data_files', DataFileViewSet, base_name="data_files")
@@ -38,7 +41,6 @@
api_v2_router.register(r'properties', PropertyViewSet, base_name="properties")
api_v2_router.register(r'taxlots', TaxLotViewSet, base_name="taxlots")
api_v2_router.register(r'reverse_and_test', TestReverseViewSet, base_name="reverse_and_test")
# TODO: NL: Upload needs to get moved to import_files
api_v2_router.register(r'upload', LocalUploaderViewSet, base_name='local_uploader')

urlpatterns = [
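For context (not part of the diff): each DefaultRouter registration above exposes DRF's standard list and detail routes. A minimal sketch of how the newly registered column_mappings endpoint would be reached over HTTP, assuming the router is mounted under /api/v2/ as the variable name suggests and a local development server on port 8000 (authentication omitted):

# List route generated by register(r'column_mappings', ...)
curl http://localhost:8000/api/v2/column_mappings/
# Detail route for a single mapping; the pk value 1 is a placeholder
curl http://localhost:8000/api/v2/column_mappings/1/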
30 changes: 30 additions & 0 deletions bin/backup_database.sh
@@ -0,0 +1,30 @@
#!/bin/bash

# Nightly backups - crontab
# 0 0 * * * /home/ubuntu/prj/seed/bin/backup_database.sh <db_name> <db_username> <db_password>

DB_NAME=$1
DB_USERNAME=$2
# Set PGPASSWORD as pg_dump uses this env var.
DB_PASSWORD=$3

function file_name(){
    echo ${BACKUP_DIR}/${DB_NAME}_$(date '+%Y%m%d_%H%M%S').dump
}

if [[ (-z ${DB_NAME}) || (-z ${DB_USERNAME}) || (-z ${DB_PASSWORD}) ]] ; then
    echo "Expecting command to be of form ./backup_database.sh <db_name> <db_username> <db_password>"
    exit 1
fi

# currently the backup directory is hard coded
BACKUP_DIR=~/seed-backups
mkdir -p ${BACKUP_DIR}

export PGPASSWORD=${DB_PASSWORD}
echo "pg_dump -U ${DB_USERNAME} -Fc ${DB_NAME} > $(file_name)"
pg_dump -U ${DB_USERNAME} -Fc ${DB_NAME} > $(file_name)
unset PGPASSWORD

# Delete backups older than 30 days (the dumps above are written with a .dump extension)
find ${BACKUP_DIR} -mtime +30 -type f -name '*.dump' -delete
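A companion restore sketch (not part of this commit) for the custom-format dumps the script produces; the database name, user, password, and dump filename are placeholders:

# Restore a nightly dump into an existing database with pg_restore
export PGPASSWORD=<db_password>
pg_restore -U <db_username> -d <db_name> --clean --if-exists ~/seed-backups/<db_name>_YYYYMMDD_HHMMSS.dump
unset PGPASSWORD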
3 changes: 2 additions & 1 deletion bower.json
@@ -32,7 +32,8 @@
"moment": "~2.17.1",
"ng-tags-input": "~3.0.0",
"spin.js": "~2.3.2",
"ui-grid-draggable-rows": "~0.3.2"
"ui-grid-draggable-rows": "~0.3.2",
"angular-dragula": "^1.2.8"
},
"resolutions": {
"angular": "~1.6.1",
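To pick up the newly added angular-dragula dependency after pulling this change (a sketch assuming the project's existing bower workflow, with bower available on the PATH):

# Install the front-end dependencies declared in bower.json, including angular-dragula
bower install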
32 changes: 0 additions & 32 deletions config/management/commands/run_parallel_tests.py

This file was deleted.

2 changes: 1 addition & 1 deletion docs/source/index.rst
@@ -46,7 +46,7 @@ Updating this documentation
This python code documentation was generated by running the following::

$ pip install -r requirements/local.txt
$ sphinx-apidoc -o docs/source/modules . seed/lib/mcm seed/lib/superperms terrain.py
$ sphinx-apidoc -o docs/source/modules . seed/lib/mcm seed/lib/superperms
$ cd docs
$ make html

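To preview the generated documentation locally (a sketch, not part of the docs; docs/_build/html is the Sphinx default output directory and may differ for this project):

# Serve the built HTML docs at http://localhost:8000
cd docs/_build/html
python -m http.server 8000   # on Python 2: python -m SimpleHTTPServer 8000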
106 changes: 54 additions & 52 deletions seed/data_importer/models.py
@@ -870,41 +870,42 @@ def ready_to_import(self):
def num_cells(self):
return self.num_rows * self.num_columns

@property
def tcm_json(self):
# JSON used to render the mapping interface.
tcms = []
try:
row_number = 0
for tcm in self.tablecolumnmappings:
row_number += 1
error_message_text = ""
if tcm.error_message_text:
error_message_text = tcm.error_message_text.replace("\n", "<br>")

first_rows = ["", "", "", "", ""]
if tcm.first_five_rows:
first_rows = ["%s" % r for r in tcm.first_five_rows]
tcms.append({
'row_number': row_number,
'pk': tcm.pk,
'destination_model': tcm.destination_model,
'destination_field': tcm.destination_field,
'order': tcm.order,
'ignored': tcm.ignored,
'confidence': tcm.confidence,
'was_a_human_decision': tcm.was_a_human_decision,
'error_message_text': error_message_text,
'active': tcm.active,
'is_mapped': tcm.is_mapped,
'header_row': tcm.first_row,
'first_rows': first_rows,
})
except:
from traceback import print_exc
print_exc()

return json.dumps(tcms)
# TODO: 2/8/17 Verify that this can be removed
# @property
# def tcm_json(self):
# # JSON used to render the mapping interface.
# tcms = []
# try:
# row_number = 0
# for tcm in self.tablecolumnmappings:
# row_number += 1
# error_message_text = ""
# if tcm.error_message_text:
# error_message_text = tcm.error_message_text.replace("\n", "<br>")
#
# first_rows = ["", "", "", "", ""]
# if tcm.first_five_rows:
# first_rows = ["%s" % r for r in tcm.first_five_rows]
# tcms.append({
# 'row_number': row_number,
# 'pk': tcm.pk,
# 'destination_model': tcm.destination_model,
# 'destination_field': tcm.destination_field,
# 'order': tcm.order,
# 'ignored': tcm.ignored,
# 'confidence': tcm.confidence,
# 'was_a_human_decision': tcm.was_a_human_decision,
# 'error_message_text': error_message_text,
# 'active': tcm.active,
# 'is_mapped': tcm.is_mapped,
# 'header_row': tcm.first_row,
# 'first_rows': first_rows,
# })
# except:
# from traceback import print_exc
# print_exc()
#
# return json.dumps(tcms)

@property
def tcm_errors_json(self):
@@ -1191,23 +1192,24 @@ def first_row(self)
self._first_row = first_row
return self._first_row

@property
def first_five_rows(self):
if not hasattr(self, "_first_five_rows"):
first_rows = []
for r in self.import_file.second_to_fifth_rows:
try:
if r[self.order - 1]:
first_rows.append(r[self.order - 1])
else:
first_rows.append('')
except:
first_rows.append('')
pass

self._first_five_rows = first_rows

return self._first_five_rows
# TODO: Verify that this can be removed
# @property
# def first_five_rows(self):
# if not hasattr(self, "_first_five_rows"):
# first_rows = []
# for r in self.import_file.second_to_fifth_rows:
# try:
# if r[self.order - 1]:
# first_rows.append(r[self.order - 1])
# else:
# first_rows.append('')
# except:
# first_rows.append('')
# pass
#
# self._first_five_rows = first_rows
#
# return self._first_five_rows

@property
def destination_django_field(self):
