diff --git a/.circleci/config.yml b/.circleci/config.yml index 94426fefd..63b8c2436 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -38,7 +38,13 @@ jobs: export PYTHONHASHSEED=${RANDOM} pip install tox --user tox -e py36-d21 deps - + - run: + name: codecov + command: | + PATH=$HOME/.local/bin:$PATH + pip install --user codecov + coverage xml + ~/.local/bin/codecov --required -X search gcov pycov -f coverage.xml --flags $CIRCLE_JOB - store_artifacts: path: ~build/coverage destination: coverage @@ -188,7 +194,7 @@ jobs: fi docker build \ --build-arg GITHUB_TOKEN=${GITHUB_TOKEN} \ - --build-arg DATAMART_VERSION=${TAG} \ + --build-arg VERSION=${TAG} \ -t ${DOCKER_IMAGE}:${TAG} \ -f docker/Dockerfile . docker images "unicef/datamart*" diff --git a/.dockerignore b/.dockerignore index 35a34362c..22429a5a7 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,7 +1,5 @@ -.git -.tox -.venv -~build -.pytest_cache -.circleci +.* +~* db +docs +tests diff --git a/CHANGES b/CHANGES index df03e5143..29824c8ee 100644 --- a/CHANGES +++ b/CHANGES @@ -1,3 +1,14 @@ +1.6 +--- +* add ability to invalidate cache directly from admin endpoint +* add ability to queue task from task log +* invalidate cache on dataset refresh +* add RedisBoard to monitor cache entries +* new HACT endpoint +* add Microsoft style JSON output +* add Microsoft style XML output + + 1.5 --- * add .xlsx format support @@ -6,10 +17,12 @@ * enable Azure login without email * add partner.name to Intervention endpoint + 1.4.1 ----- * fixes dependencies + 1.4 --- * WARNINGS: migration reset diff --git a/Pipfile b/Pipfile index 3c429e8ed..d08c7890a 100644 --- a/Pipfile +++ b/Pipfile @@ -4,7 +4,7 @@ verify_ssl = true name = "pypi" [packages] -"psycopg2-binary" = "*" +psycopg2 = "*" admin-extra-urls = ">=2.1" celery = "*" coreapi = "*" @@ -27,7 +27,6 @@ djangorestframework-jwt = "*" drf-dynamic-serializer = ">=1.2.0" drf-extensions = "*" drf-yasg = {version = "*", extras = ["validation"]} -flower = "*" 
gunicorn = "*" humanize = "*" pyparsing = "*" @@ -44,6 +43,9 @@ cryptography = "*" django-countries = "*" django-filter = "*" drf-renderer-xlsx = "*" +django-redisboard = "*" +djangorestframework-xml = "*" +redis = "==2.10.6" [dev-packages] "flake8" = ">=3.6.0" diff --git a/Pipfile.lock b/Pipfile.lock index 674d7233f..b549503fc 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "4ebe763c2d5e81b5c2291549170c2ff2329dac4cafccf66b9b12e18589c19e12" + "sha256": "248d5d3c8b7361358b8f71a0951e19f52b3abd72335b3252070dab04b3e5165a" }, "pipfile-spec": 6, "requires": { @@ -37,13 +37,6 @@ ], "version": "==0.24.0" }, - "babel": { - "hashes": [ - "sha256:6778d85147d5d85345c14a26aada5e478ab04e39b078b0745ee6870c2b5cf669", - "sha256:8cba50f48c529ca3fa18cf81fa9403be176d374ac4d60738b839122dfaaa3d23" - ], - "version": "==2.6.0" - }, "billiard": { "hashes": [ "sha256:ed65448da5877b5558f19d2f7f11f8355ea76b3e63e1c0a6059f47cfae5f1c84" @@ -133,28 +126,28 @@ }, "cryptography": { "hashes": [ - "sha256:02602e1672b62e803e08617ec286041cc453e8d43f093a5f4162095506bc0beb", - "sha256:10b48e848e1edb93c1d3b797c83c72b4c387ab0eb4330aaa26da8049a6cbede0", - "sha256:17db09db9d7c5de130023657be42689d1a5f60502a14f6f745f6f65a6b8195c0", - "sha256:227da3a896df1106b1a69b1e319dce218fa04395e8cc78be7e31ca94c21254bc", - "sha256:2cbaa03ac677db6c821dac3f4cdfd1461a32d0615847eedbb0df54bb7802e1f7", - "sha256:31db8febfc768e4b4bd826750a70c79c99ea423f4697d1dab764eb9f9f849519", - "sha256:4a510d268e55e2e067715d728e4ca6cd26a8e9f1f3d174faf88e6f2cb6b6c395", - "sha256:6a88d9004310a198c474d8a822ee96a6dd6c01efe66facdf17cb692512ae5bc0", - "sha256:76936ec70a9b72eb8c58314c38c55a0336a2b36de0c7ee8fb874a4547cadbd39", - "sha256:7e3b4aecc4040928efa8a7cdaf074e868af32c58ffc9bb77e7bf2c1a16783286", - "sha256:8168bcb08403ef144ff1fb880d416f49e2728101d02aaadfe9645883222c0aa5", - "sha256:8229ceb79a1792823d87779959184a1bf95768e9248c93ae9f97c7a2f60376a1", - 
"sha256:8a19e9f2fe69f6a44a5c156968d9fc8df56d09798d0c6a34ccc373bb186cee86", - "sha256:8d10113ca826a4c29d5b85b2c4e045ffa8bad74fb525ee0eceb1d38d4c70dfd6", - "sha256:be495b8ec5a939a7605274b6e59fbc35e76f5ad814ae010eb679529671c9e119", - "sha256:dc2d3f3b1548f4d11786616cf0f4415e25b0fbecb8a1d2cd8c07568f13fdde38", - "sha256:e4aecdd9d5a3d06c337894c9a6e2961898d3f64fe54ca920a72234a3de0f9cb3", - "sha256:e79ab4485b99eacb2166f3212218dd858258f374855e1568f728462b0e6ee0d9", - "sha256:f995d3667301e1754c57b04e0bae6f0fa9d710697a9f8d6712e8cca02550910f" + "sha256:02915ee546b42ce513e8167140e9937fc4c81a06a82216e086ccce51f347948a", + "sha256:03cc8bc5a69ae3d44acf1a03facdb7c10a94c67907862c563e10efe72b737977", + "sha256:07f76bde6815c55195f3b3812d35769cc7c765144c0bb71ae45e02535d078591", + "sha256:13eac1c477b9af7e9a9024369468d08aead6ad78ed599d163ad046684474364b", + "sha256:179bfb585c5efc87ae0e665770e4896727b92dbc1f810c761b1ebf8363e2fec8", + "sha256:414af0ba308e74c1f8bc5b11befc86cb66b10be8959547786f64258830d2096f", + "sha256:41a1ca14f255df8c44dd22c6006441d631d1589104045ec7263cc47e9772f41a", + "sha256:54947eb98bc4eef99ddf49f45d2694ea5a3929ab3edc9806ad01967368594d82", + "sha256:5bac7a2abda07d0c3c8429210349bb54149ad8940dc7bcffedcd56519b410a3c", + "sha256:7f41af8c586bed9f59cfe8832d818b3b75c860d7025da9cd2db76875a72ff785", + "sha256:8004fae1b3cb2dbd90a011ad972e49a7e78a871b89c70cc7213cf4ebd2532bcb", + "sha256:8e0eccadc3b465e12c50a5b8fb4d39cf401b44d7bb9936c70fddb5e5aaf740d5", + "sha256:95b4741722269cfdc134fec23b7ae6503ee2aea83d0924cfee6d6ec54cd42d8e", + "sha256:a06f5aa6d7a94531dfe82eb2972e669258c452fe9cf88f76116610de4c789785", + "sha256:b0833d27c7eb536bc27323a1e8e22cb39ebac78c4ef3be0167ba40f447344808", + "sha256:b72dec675bc59a01edc96616cd48ec465b714481caa0938c8bbca5d18f17d5df", + "sha256:c800ddc23b5206ce025f23225fdde89cdc0e64016ad914d5be32d1f602ce9495", + "sha256:c980c8c313a5e014ae12e2245e89e7b30427e5a98cbb88afe478ecae85f3abaa", + "sha256:e85b410885addaeb31a867eabcefc9ef4a7e904ad45eac9e60a763a54b244626" 
], "index": "pypi", - "version": "==2.3.1" + "version": "==2.4.1" }, "defusedxml": { "hashes": [ @@ -196,7 +189,6 @@ }, "django-concurrency": { "hashes": [ - "sha256:0546db842e41c50b330a0311463c874b58d2ba07f7de3d46d0ad84c99d4205d8", "sha256:233d23a8751989df6db2886957a8fbcc2408a1f16bb28262aab8a538d756d9d2" ], "index": "pypi", @@ -280,16 +272,23 @@ }, "django-redis": { "hashes": [ - "sha256:15b47faef6aefaa3f47135a2aeb67372da300e4a4cf06809c66ab392686a2155", - "sha256:a90343c33a816073b735f0bed878eaeec4f83b75fcc0dce2432189b8ea130424" + "sha256:af0b393864e91228dd30d8c85b5c44d670b5524cb161b7f9e41acc98b6e5ace7", + "sha256:f46115577063d00a890867c6964ba096057f07cb756e78e0503b89cd18e4e083" + ], + "index": "pypi", + "version": "==4.10.0" + }, + "django-redisboard": { + "hashes": [ + "sha256:5db5598b9f423d31ae7424b825a6c41c29fbc8cae2907ccc18974e862122be86", + "sha256:6c1df00d936f178f9e1a8991324ed9e7e3bf0ffefaaf195fbf4decc5c8c17d14" ], "index": "pypi", - "version": "==4.9.0" + "version": "==4.0.0" }, "django-regex": { "hashes": [ - "sha256:a4508894b49dd35e49acfbb8ee1046913ebe375ecbecebf64d30ffb9b4be6f2b", - "sha256:b3f759183f51b009885b91b0b313ed647d124a771790aab29a161f7a8ff1664b" + "sha256:a4508894b49dd35e49acfbb8ee1046913ebe375ecbecebf64d30ffb9b4be6f2b" ], "index": "pypi", "version": "==0.2.1" @@ -344,6 +343,14 @@ "index": "pypi", "version": "==1.11.0" }, + "djangorestframework-xml": { + "hashes": [ + "sha256:caea8e446298b7fe1eb9a79306f35554db7531c2e637734d32de3cf99afbdc5a", + "sha256:f7d5efc26eabbca73db0ff0f0c15b59ca08e36660c02f96563a0d937321f519f" + ], + "index": "pypi", + "version": "==1.3.0" + }, "drf-dynamic-serializer": { "hashes": [ "sha256:058ae34570c1dfce4e8e97ac4a5c4ad543279b77f97e2ca254caaff9407c149a" @@ -380,8 +387,7 @@ }, "et-xmlfile": { "hashes": [ - "sha256:614d9722d572f6246302c4491846d2c393c199cfa4edc9af593437691683335b", - "sha256:a6de963569df3b3bf5a3427e2d40495e6ce81006dacb2b2b79670a0f42a8b689" + 
"sha256:614d9722d572f6246302c4491846d2c393c199cfa4edc9af593437691683335b" ], "version": "==1.0.1" }, @@ -393,13 +399,6 @@ "markers": "extra == 'validation'", "version": "==6.13.2" }, - "flower": { - "hashes": [ - "sha256:a7a828c2dbea7e9cff1c86d63626f0eeb047b1b1e9a0ee5daad30771fb51e6d0" - ], - "index": "pypi", - "version": "==0.9.2" - }, "gunicorn": { "hashes": [ "sha256:aa8e0b40b4157b36a5df5e599f45c9c76d6af43845ba3b3b0efe2c70473c2471", @@ -471,11 +470,36 @@ }, "markupsafe": { "hashes": [ - "sha256:284146e3bc80e9744994d52e16e1b05c662351f3addcc99db9bcdc85a70ef786", - "sha256:a6be69091dac236ea9c6bc7d012beab42010fa914c459791d627dad4910eb665", - "sha256:db7e2fd26ed950c7c5c6d35ce017bb0298e4832ba69084e9f740a16ae5656fee" + "sha256:048ef924c1623740e70204aa7143ec592504045ae4429b59c30054cb31e3c432", + "sha256:130f844e7f5bdd8e9f3f42e7102ef1d49b2e6fdf0d7526df3f87281a532d8c8b", + "sha256:19f637c2ac5ae9da8bfd98cef74d64b7e1bb8a63038a3505cd182c3fac5eb4d9", + "sha256:1b8a7a87ad1b92bd887568ce54b23565f3fd7018c4180136e1cf412b405a47af", + "sha256:1c25694ca680b6919de53a4bb3bdd0602beafc63ff001fea2f2fc16ec3a11834", + "sha256:1f19ef5d3908110e1e891deefb5586aae1b49a7440db952454b4e281b41620cd", + "sha256:1fa6058938190ebe8290e5cae6c351e14e7bb44505c4a7624555ce57fbbeba0d", + "sha256:31cbb1359e8c25f9f48e156e59e2eaad51cd5242c05ed18a8de6dbe85184e4b7", + "sha256:3e835d8841ae7863f64e40e19477f7eb398674da6a47f09871673742531e6f4b", + "sha256:4e97332c9ce444b0c2c38dd22ddc61c743eb208d916e4265a2a3b575bdccb1d3", + "sha256:525396ee324ee2da82919f2ee9c9e73b012f23e7640131dd1b53a90206a0f09c", + "sha256:52b07fbc32032c21ad4ab060fec137b76eb804c4b9a1c7c7dc562549306afad2", + "sha256:52ccb45e77a1085ec5461cde794e1aa037df79f473cbc69b974e73940655c8d7", + "sha256:5c3fbebd7de20ce93103cb3183b47671f2885307df4a17a0ad56a1dd51273d36", + "sha256:5e5851969aea17660e55f6a3be00037a25b96a9b44d2083651812c99d53b14d1", + "sha256:5edfa27b2d3eefa2210fb2f5d539fbed81722b49f083b2c6566455eb7422fd7e", + 
"sha256:7d263e5770efddf465a9e31b78362d84d015cc894ca2c131901a4445eaa61ee1", + "sha256:83381342bfc22b3c8c06f2dd93a505413888694302de25add756254beee8449c", + "sha256:857eebb2c1dc60e4219ec8e98dfa19553dae33608237e107db9c6078b1167856", + "sha256:98e439297f78fca3a6169fd330fbe88d78b3bb72f967ad9961bcac0d7fdd1550", + "sha256:bf54103892a83c64db58125b3f2a43df6d2cb2d28889f14c78519394feb41492", + "sha256:d9ac82be533394d341b41d78aca7ed0e0f4ba5a2231602e2f05aa87f25c51672", + "sha256:e982fe07ede9fada6ff6705af70514a52beb1b2c3d25d4e873e82114cf3c5401", + "sha256:edce2ea7f3dfc981c4ddc97add8a61381d9642dc3273737e756517cc03e84dd6", + "sha256:efdc45ef1afc238db84cb4963aa689c0408912a0239b0721cb172b4016eb31d6", + "sha256:f137c02498f8b935892d5c0172560d7ab54bc45039de8805075e19079c639a9c", + "sha256:f82e347a72f955b7017a39708a3667f106e6ad4d10b25f237396a7115d8ed5fd", + "sha256:fb7c206e01ad85ce57feeaaa0bf784b97fa3cad0d4a5737bc5295785f5c613a1" ], - "version": "==1.0" + "version": "==1.1.0" }, "oauthlib": { "hashes": [ @@ -486,9 +510,9 @@ }, "openpyxl": { "hashes": [ - "sha256:022c0f3fa1e873cc0ba20651c54dd5e6276fc4ff150b4060723add4fc448645e" + "sha256:41eb21a5620343d715b38081536c4ed3c37249afb72e569fd2af93852ed4ddde" ], - "version": "==2.5.9" + "version": "==2.5.10" }, "psutil": { "hashes": [ @@ -506,74 +530,39 @@ }, "psycopg2": { "hashes": [ - "sha256:0b9e48a1c1505699a64ac58815ca99104aacace8321e455072cee4f7fe7b2698", - "sha256:0f4c784e1b5a320efb434c66a50b8dd7e30a7dc047e8f45c0a8d2694bfe72781", - "sha256:0fdbaa32c9eb09ef09d425dc154628fca6fa69d2f7c1a33f889abb7e0efb3909", - "sha256:11fbf688d5c953c0a5ba625cc42dea9aeb2321942c7c5ed9341a68f865dc8cb1", - "sha256:19eaac4eb25ab078bd0f28304a0cb08702d120caadfe76bb1e6846ed1f68635e", - "sha256:3232ec1a3bf4dba97fbf9b03ce12e4b6c1d01ea3c85773903a67ced725728232", - "sha256:36f8f9c216fcca048006f6dd60e4d3e6f406afde26cfb99e063f137070139eaf", - "sha256:59c1a0e4f9abe970062ed35d0720935197800a7ef7a62b3a9e3a70588d9ca40b", - 
"sha256:6506c5ff88750948c28d41852c09c5d2a49f51f28c6d90cbf1b6808e18c64e88", - "sha256:6bc3e68ee16f571681b8c0b6d5c0a77bef3c589012352b3f0cf5520e674e9d01", - "sha256:6dbbd7aabbc861eec6b910522534894d9dbb507d5819bc982032c3ea2e974f51", - "sha256:6e737915de826650d1a5f7ff4ac6cf888a26f021a647390ca7bafdba0e85462b", - "sha256:6ed9b2cfe85abc720e8943c1808eeffd41daa73e18b7c1e1a228b0b91f768ccc", - "sha256:711ec617ba453fdfc66616db2520db3a6d9a891e3bf62ef9aba4c95bb4e61230", - "sha256:844dacdf7530c5c612718cf12bc001f59b2d9329d35b495f1ff25045161aa6af", - "sha256:86b52e146da13c896e50c5a3341a9448151f1092b1a4153e425d1e8b62fec508", - "sha256:985c06c2a0f227131733ae58d6a541a5bc8b665e7305494782bebdb74202b793", - "sha256:a86dfe45f4f9c55b1a2312ff20a59b30da8d39c0e8821d00018372a2a177098f", - "sha256:aa3cd07f7f7e3183b63d48300666f920828a9dbd7d7ec53d450df2c4953687a9", - "sha256:b1964ed645ef8317806d615d9ff006c0dadc09dfc54b99ae67f9ba7a1ec9d5d2", - "sha256:b2abbff9e4141484bb89b96eb8eae186d77bc6d5ffbec6b01783ee5c3c467351", - "sha256:cc33c3a90492e21713260095f02b12bee02b8d1f2c03a221d763ce04fa90e2e9", - "sha256:d7de3bf0986d777807611c36e809b77a13bf1888f5c8db0ebf24b47a52d10726", - "sha256:db5e3c52576cc5b93a959a03ccc3b02cb8f0af1fbbdc80645f7a215f0b864f3a", - "sha256:e168aa795ffbb11379c942cf95bf813c7db9aa55538eb61de8c6815e092416f5", - "sha256:e9ca911f8e2d3117e5241d5fa9aaa991cb22fb0792627eeada47425d706b5ec8", - "sha256:eccf962d41ca46e6326b97c8fe0a6687b58dfc1a5f6540ed071ff1474cea749e", - "sha256:efa19deae6b9e504a74347fe5e25c2cb9343766c489c2ae921b05f37338b18d1", - "sha256:f4b0460a21f784abe17b496f66e74157a6c36116fa86da8bf6aa028b9e8ad5fe", - "sha256:f93d508ca64d924d478fb11e272e09524698f0c581d9032e68958cfbdd41faef" - ], - "version": "==2.7.5" - }, - "psycopg2-binary": { - "hashes": [ - "sha256:04afb59bbbd2eab3148e6816beddc74348078b8c02a1113ea7f7822f5be4afe3", - "sha256:098b18f4d8857a8f9b206d1dc54db56c2255d5d26458917e7bcad61ebfe4338f", - "sha256:0bf855d4a7083e20ead961fda4923887094eaeace0ab2d76eb4aa300f4bbf5bd", - 
"sha256:197dda3ffd02057820be83fe4d84529ea70bf39a9a4daee1d20ffc74eb3d042e", - "sha256:278ef63afb4b3d842b4609f2c05ffbfb76795cf6a184deeb8707cd5ed3c981a5", - "sha256:3cbf8c4fc8f22f0817220891cf405831559f4d4c12c4f73913730a2ea6c47a47", - "sha256:4305aed922c4d9d6163ab3a41d80b5a1cfab54917467da8168552c42cad84d32", - "sha256:47ee296f704fb8b2a616dec691cdcfd5fa0f11943955e88faa98cbd1dc3b3e3d", - "sha256:4a0e38cb30457e70580903367161173d4a7d1381eb2f2cfe4e69b7806623f484", - "sha256:4d6c294c6638a71cafb82a37f182f24321f1163b08b5d5ca076e11fe838a3086", - "sha256:4f3233c366500730f839f92833194fd8f9a5c4529c8cd8040aa162c3740de8e5", - "sha256:5221f5a3f4ca2ddf0d58e8b8a32ca50948be9a43351fda797eb4e72d7a7aa34d", - "sha256:5c6ca0b507540a11eaf9e77dee4f07c131c2ec80ca0cffa146671bf690bc1c02", - "sha256:789bd89d71d704db2b3d5e67d6d518b158985d791d3b2dec5ab85457cfc9677b", - "sha256:7b94d29239efeaa6a967f3b5971bd0518d2a24edd1511edbf4a2c8b815220d07", - "sha256:89bc65ef3301c74cf32db25334421ea6adbe8f65601ea45dcaaf095abed910bb", - "sha256:89d6d3a549f405c20c9ae4dc94d7ed2de2fa77427a470674490a622070732e62", - "sha256:97521704ac7127d7d8ba22877da3c7bf4a40366587d238ec679ff38e33177498", - "sha256:a395b62d5f44ff6f633231abe568e2203b8fabf9797cd6386aa92497df912d9a", - "sha256:a6d32c37f714c3f34158f3fa659f3a8f2658d5f53c4297d45579b9677cc4d852", - "sha256:a89ee5c26f72f2d0d74b991ce49e42ddeb4ac0dc2d8c06a0f2770a1ab48f4fe0", - "sha256:b4c8b0ef3608e59317bfc501df84a61e48b5445d45f24d0391a24802de5f2d84", - "sha256:b5fcf07140219a1f71e18486b8dc28e2e1b76a441c19374805c617aa6d9a9d55", - "sha256:b86f527f00956ecebad6ab3bb30e3a75fedf1160a8716978dd8ce7adddedd86f", - "sha256:be4c4aa22ba22f70de36c98b06480e2f1697972d49eb20d525f400d204a6d272", - "sha256:c2ac7aa1a144d4e0e613ac7286dae85671e99fe7a1353954d4905629c36b811c", - "sha256:de26ef4787b5e778e8223913a3e50368b44e7480f83c76df1f51d23bd21cea16", - "sha256:e70ebcfc5372dc7b699c0110454fc4263967f30c55454397e5769eb72c0eb0ce", - "sha256:eadbd32b6bc48b67b0457fccc94c86f7ccc8178ab839f684eb285bb592dc143e", 
- "sha256:ecbc6dfff6db06b8b72ae8a2f25ff20fbdcb83cb543811a08f7cb555042aa729" + "sha256:10e391687b171878181e71736d0effe3772314a339d9ae30995ec8171a0c834e", + "sha256:1283f9d45e458c2dcb15ba89367923563f90ef636fe78ee22df75183484a0237", + "sha256:1a9c32e4d140bea225f9821d993b2e53c913e717ea97b851246aa9b300095d8f", + "sha256:1be6f2438d2b71fec7b07c3c0949dd321b04349c382907ea76b36120edec8300", + "sha256:20ca6f29e118b8dd7133e8708b3fba2881e70a4e0841f874ed23985b7201a076", + "sha256:227c115b3c1f65d61385e51ac690b91b584640aefb45bffacd4bd33d02ed7221", + "sha256:27959abe64ca1fc6d8cd11a71a1f421d8287831a3262bd4cacd43bbf43cc3c82", + "sha256:2b2daf1fe30a58300542aea679fd87d1e1c2afd36e7644837b7954fa2dbacb92", + "sha256:36e51a51f295fdf67bcf05e7b1877011a6b39e6622b0013fe31c5025241873a3", + "sha256:3992b9b914f2eb77dc07e8045d2ca979e491612808bc5c7cd68f307469acf9f6", + "sha256:39a11de2335ad45ececed43ab851d36a4c52843d756471b940804f301792781e", + "sha256:3c2afe9ef0d1649005e3ccf93c1aaccd6f8ee379530e763d3b3b77f406b7c0ae", + "sha256:3fb18e0e52807fe3a300dc1b5421aa492d5e759550918f597d61863419482535", + "sha256:55eab94de96ee9702f23283e9c8b03cfdb0001e2b14d5d2e1bd5ff8114b96b9f", + "sha256:7e95c0ab7e7e6e452586f35d4d8966b1e924c8dd2c23977e3ea4968770ff1d26", + "sha256:7f47514dbddf604f196fcfe5da955537f04691bef8124aff5632316a78d992b7", + "sha256:8345370356bb4bddf93acbcfd0357163dd6b09471937adcfb38a2fbb49bdce53", + "sha256:8bc6ecb220c0b88d3742042013129c817c44459795c97e9ce1bca70a3f37a53b", + "sha256:8df623f248be15d1725faf5f333791678775047f12f17a90d29b5d22573f5cdc", + "sha256:9645f1305e4268cc0fc88c823cd6c91de27c003e183c233a6a230e5e963039ee", + "sha256:a68719ed5be8373dd72c9e45d55f7a202285e05a2e392eaa8872a67ea47d7d20", + "sha256:aca0edf062ec09e954fdf0cc93d3a872362701210983a1442549e703aedec25d", + "sha256:b0dd2114d93d8f424bb8ae76e0dc540f104b70ca9163172c05e7700b1459d4c9", + "sha256:b2c09359d6802279efb9efb3f91a9c94567151baee95175f9b637ea628f35244", + 
"sha256:ca7bc37b1efb7cc25271bf10f398462ed975d95259af1406d38fcb268466e34f", + "sha256:e64235d9013ebf6319cb9654e08f5066112c34d8c4cc41186254ab9c3d6d5b9b", + "sha256:ec9be679c0065667503851141c31fa699e1cc69ded3ba8e5d3673dd5a6eb1370", + "sha256:eca00d0f91fcb44d88b12f1fd16ad138e38fa07debb79587e2b7ff1fe80d72b9", + "sha256:f256e807b8b2b45b6af60d7f2bb5194aab2f4acc861241c4d8ef942a55f5030d", + "sha256:fce7612a3bd6a7ba95799f88285653bf130bd7ca066b52674d5f850108b2aec0" ], "index": "pypi", - "version": "==2.7.5" + "version": "==2.7.6.1" }, "pycparser": { "hashes": [ @@ -649,14 +638,15 @@ "sha256:8a1900a9f2a0a44ecf6e8b5eb3e967a9909dfed219ad66df094f27f7d6f330fb", "sha256:a22ca993cea2962dbb588f9f30d0015ac4afcc45bee27d3978c0dbe9e97c6c0f" ], + "index": "pypi", "version": "==2.10.6" }, "requests": { "hashes": [ - "sha256:99dcfdaaeb17caf6e526f32b6a7b780461512ab3f1d992187801694cba42770c", - "sha256:a84b8c9ab6239b578f22d1c21d51b696dcfe004032bb80ea832398d6909d7279" + "sha256:65b3a120e4329e33c9889db89c80976c5272f56ea92d3e74da8a463992e3ff54", + "sha256:ea881206e59f41dbd0bd445437d792e43906703fff75ca8ff43ccdb11f33f263" ], - "version": "==2.20.0" + "version": "==2.20.1" }, "requests-oauthlib": { "hashes": [ @@ -674,30 +664,30 @@ }, "ruamel.yaml": { "hashes": [ - "sha256:072dc57ca784e92ea4b862d9bba96fcd67eef690e9b900bdda98b94101d9574f", - "sha256:092ca763a7ec607551a319f0c5bb8ba3a72d41fb717b842bf6712ec9fba7bdc0", - "sha256:179514d1224742698b0ba0796e8a708f335a5ee236f045e3721477060af60212", - "sha256:2f07aff162737b0de070cf985b9d83ad41b89f6643db8fa24a5dd6c0b15a5a7b", - "sha256:325c566a83b01f2c2203ce1f560ed165e6bed9423db845c750a71591d019795e", - "sha256:4a2d25b2f41e4de9bacc48047f30004fc81acd13a2fe933b0e4df4e3f7c53324", - "sha256:50c60aca46a5cea8b78fe790414618d68919e15f3bc6b30eade8bea2e8346bb4", - "sha256:55e549faac1cf8896ac3d22303d1a06d3eac3bec888a3a8ee8261427d5c018a1", - "sha256:5c268be4b4c7cd5f29debaadf6eeec0babcd97a38b675771a289f095f491faeb", - 
"sha256:6336809db65c16141af0ce74c5dd543921df7ade7309ca1b1d29cee09a732953", - "sha256:6d8cc8ec8f7f5c3bc270834f8682bab2205f3416415cb3129713d429af74a3f0", - "sha256:88acd9556c163b50f7bc9d745f9e5c797ae45d7472347c7292d2da03678f49e5", - "sha256:8d917bcf5e759c5000d87540c866e443430c6b22340a996a8bc934a4f2e6cfc6", - "sha256:90d6da3507df6eaf954053344069947b68657a2ae60858755088df601ae13315", - "sha256:93b06f69de4651386d2f1a39fa44173017ac1f736bddde8293e51eb70a8505eb", - "sha256:9dc2a9869f45ace93bb8ecc83a308498ecf9aabd4e54561280c33d29f1f3546d", - "sha256:b53ba9d2fe0823b9f6e0159673f74317261e1cc3b8437785681938dd8ea18057", - "sha256:bc818b96b08cd1b66bff34725bafe91d20a009acfda22c690d85ecdc061c4d8f", - "sha256:c76a325efc566c8a7c92fb369fb20f4190c6e59d2b5fff77d016aa2d4dd875eb", - "sha256:e9b03d591f6591aa4094628d4e41e1bea90c1fdee42dfcc18734a74c4c47bec8", - "sha256:ed867438b72f0e9de6d57c557c1b28d5f7dce35836107695135d53b1a46825fe", - "sha256:fc73c2e2c46e59d78d9e8be8a7e7252f8ff0e18ec7b7886b7a0f7797598f431a" - ], - "version": "==0.15.76" + "sha256:0dad3f56197e28c04ab251aeb739377a5da74ea83bcac00b79232d7cef66dc7a", + "sha256:19a2757b13c2461572c7d4be61ec25caabb0fc34af48b4565c88a085f570cb76", + "sha256:39cfb38a8590df4ab940b99d512df8a61f871a25f5912727fab1f36b20a12c1d", + "sha256:475762beae3ceedc7e4a007ed12a90f97a262e7ce7239029dc3d0efcdffb2e7b", + "sha256:4d282afb28a7a09dee7df7ddffe3611411e33d05bafd6ddfd6ef40869479252c", + "sha256:54137fa40c62fd72d2646f2635e97e5173dd144a48a3bd25c0c7d743c2966162", + "sha256:54359de3ee1c8955607a050f1dbdfddd0c043f0df1e99d5b82128e3fc6b5c966", + "sha256:584a8513bc7cac176112093c8bc55e457fd4f694b267ef581d8eb1e983e77e7f", + "sha256:5953840b9852e1e2735c9d413d01ddfb768334e475cf130d70064ad8831c30ce", + "sha256:659767cb717052f98fd358ae21bbe192957535693e2c68fba27b2f3c6a14917c", + "sha256:85793c5fe321e9202eba521b0bb3e6303bcb61f6e56378f59e874ca36a7e9d5f", + "sha256:8609f1263a73da187f7febebca24b347aee7da777edc83b8625cb7af3e16b345", + 
"sha256:883e42e30fcd485b8195522121960cbcc4bba2cb5161cec29b3cfd6e1072eaf4", + "sha256:928146b4a6ef2ce4a5b26663ceddb4961bda00fa1c4e255e6b3e3ef9b284df04", + "sha256:a91a0f5b1e18b4335a0febc757c4a465da0ab6a4f9c672fcc917bcf738efcd66", + "sha256:c0522d1d02856a00e139e2a915c74b20ad1d38f2748ea3f2d4e087ac30322ad2", + "sha256:e3238c0b169f104fa20b217d741632228cbf9011690edabdd265deff09efb299", + "sha256:e9c0b17da17ee512f49de3683c97118877d4ff706ea2edac82f187cd4a9cca4d", + "sha256:eabf69011659182a1044e7581f8ea69e4f4511933955e71fc2f007b28e214791", + "sha256:f00a2ad9b03a8cad5ff620dc0bb7afbd2eaf0ebbf1e5477b632680a2c1d7c656", + "sha256:f6f5921a67211c6a98940d5f03474158c8e3744b26fd0bd5d60ab951aa36c002", + "sha256:fcb63dc194609d1f49e309b976976f8ddd8416e0b8942b3091d3d0a525bfd18a" + ], + "version": "==0.15.78" }, "six": { "hashes": [ @@ -745,18 +735,6 @@ "markers": "extra == 'validation'", "version": "==2.4.1" }, - "tornado": { - "hashes": [ - "sha256:0662d28b1ca9f67108c7e3b77afabfb9c7e87bde174fbda78186ecedc2499a9d", - "sha256:4e5158d97583502a7e2739951553cbd88a72076f152b4b11b64b9a10c4c49409", - "sha256:732e836008c708de2e89a31cb2fa6c0e5a70cb60492bee6f1ea1047500feaf7f", - "sha256:8154ec22c450df4e06b35f131adc4f2f3a12ec85981a203301d310abf580500f", - "sha256:8e9d728c4579682e837c92fdd98036bd5cdefa1da2aaf6acf26947e6dd0c01c5", - "sha256:d4b3e5329f572f055b587efc57d29bd051589fb5a43ec8898c77a47ec2fa2bbb", - "sha256:e5f2585afccbff22390cddac29849df463b252b711aa2ce7c5f3f342a5b3b444" - ], - "version": "==5.1.1" - }, "unicodecsv": { "hashes": [ "sha256:018c08037d48649a0412063ff4eda26eaa81eff1546dbffa51fa5293276ff7fc" @@ -793,11 +771,11 @@ }, "whitenoise": { "hashes": [ - "sha256:133a92ff0ab8fb9509f77d4f7d0de493eca19c6fea973f4195d4184f888f2e02", - "sha256:32b57d193478908a48acb66bf73e7a3c18679263e3e64bfebcfac1144a430039" + "sha256:118ab3e5f815d380171b100b05b76de2a07612f422368a201a9ffdeefb2251c1", + "sha256:42133ddd5229eeb6a0c9899496bdbe56c292394bf8666da77deeb27454c0456a" ], "index": "pypi", - "version": 
"==4.1" + "version": "==4.1.2" } }, "develop": { @@ -883,41 +861,39 @@ }, "coverage": { "hashes": [ - "sha256:03481e81d558d30d230bc12999e3edffe392d244349a90f4ef9b88425fac74ba", - "sha256:0b136648de27201056c1869a6c0d4e23f464750fd9a9ba9750b8336a244429ed", - "sha256:0bf8cbbd71adfff0ef1f3a1531e6402d13b7b01ac50a79c97ca15f030dba6306", - "sha256:10a46017fef60e16694a30627319f38a2b9b52e90182dddb6e37dcdab0f4bf95", - "sha256:198626739a79b09fa0a2f06e083ffd12eb55449b5f8bfdbeed1df4910b2ca640", - "sha256:23d341cdd4a0371820eb2b0bd6b88f5003a7438bbedb33688cd33b8eae59affd", - "sha256:28b2191e7283f4f3568962e373b47ef7f0392993bb6660d079c62bd50fe9d162", - "sha256:2a5b73210bad5279ddb558d9a2bfedc7f4bf6ad7f3c988641d83c40293deaec1", - "sha256:2eb564bbf7816a9d68dd3369a510be3327f1c618d2357fa6b1216994c2e3d508", - "sha256:337ded681dd2ef9ca04ef5d93cfc87e52e09db2594c296b4a0a3662cb1b41249", - "sha256:3a2184c6d797a125dca8367878d3b9a178b6fdd05fdc2d35d758c3006a1cd694", - "sha256:3c79a6f7b95751cdebcd9037e4d06f8d5a9b60e4ed0cd231342aa8ad7124882a", - "sha256:3d72c20bd105022d29b14a7d628462ebdc61de2f303322c0212a054352f3b287", - "sha256:3eb42bf89a6be7deb64116dd1cc4b08171734d721e7a7e57ad64cc4ef29ed2f1", - "sha256:4635a184d0bbe537aa185a34193898eee409332a8ccb27eea36f262566585000", - "sha256:56e448f051a201c5ebbaa86a5efd0ca90d327204d8b059ab25ad0f35fbfd79f1", - "sha256:5a13ea7911ff5e1796b6d5e4fbbf6952381a611209b736d48e675c2756f3f74e", - "sha256:69bf008a06b76619d3c3f3b1983f5145c75a305a0fea513aca094cae5c40a8f5", - "sha256:6bc583dc18d5979dc0f6cec26a8603129de0304d5ae1f17e57a12834e7235062", - "sha256:701cd6093d63e6b8ad7009d8a92425428bc4d6e7ab8d75efbb665c806c1d79ba", - "sha256:7608a3dd5d73cb06c531b8925e0ef8d3de31fed2544a7de6c63960a1e73ea4bc", - "sha256:76ecd006d1d8f739430ec50cc872889af1f9c1b6b8f48e29941814b09b0fd3cc", - "sha256:7aa36d2b844a3e4a4b356708d79fd2c260281a7390d678a10b91ca595ddc9e99", - "sha256:7d3f553904b0c5c016d1dad058a7554c7ac4c91a789fca496e7d8347ad040653", - 
"sha256:7e1fe19bd6dce69d9fd159d8e4a80a8f52101380d5d3a4d374b6d3eae0e5de9c", - "sha256:8c3cb8c35ec4d9506979b4cf90ee9918bc2e49f84189d9bf5c36c0c1119c6558", - "sha256:9d6dd10d49e01571bf6e147d3b505141ffc093a06756c60b053a859cb2128b1f", - "sha256:be6cfcd8053d13f5f5eeb284aa8a814220c3da1b0078fa859011c7fffd86dab9", - "sha256:c1bb572fab8208c400adaf06a8133ac0712179a334c09224fb11393e920abcdd", - "sha256:de4418dadaa1c01d497e539210cb6baa015965526ff5afc078c57ca69160108d", - "sha256:e05cb4d9aad6233d67e0541caa7e511fa4047ed7750ec2510d466e806e0255d6", - "sha256:f05a636b4564104120111800021a92e43397bc12a5c72fed7036be8556e0029e", - "sha256:f3f501f345f24383c0000395b26b726e46758b71393267aeae0bd36f8b3ade80" - ], - "version": "==4.5.1" + "sha256:09e47c529ff77bf042ecfe858fb55c3e3eb97aac2c87f0349ab5a7efd6b3939f", + "sha256:0a1f9b0eb3aa15c990c328535655847b3420231af299386cfe5efc98f9c250fe", + "sha256:0cc941b37b8c2ececfed341444a456912e740ecf515d560de58b9a76562d966d", + "sha256:10e8af18d1315de936d67775d3a814cc81d0747a1a0312d84e27ae5610e313b0", + "sha256:1b4276550b86caa60606bd3572b52769860a81a70754a54acc8ba789ce74d607", + "sha256:1e8a2627c48266c7b813975335cfdea58c706fe36f607c97d9392e61502dc79d", + "sha256:2b224052bfd801beb7478b03e8a66f3f25ea56ea488922e98903914ac9ac930b", + "sha256:447c450a093766744ab53bf1e7063ec82866f27bcb4f4c907da25ad293bba7e3", + "sha256:46101fc20c6f6568561cdd15a54018bb42980954b79aa46da8ae6f008066a30e", + "sha256:4710dc676bb4b779c4361b54eb308bc84d64a2fa3d78e5f7228921eccce5d815", + "sha256:510986f9a280cd05189b42eee2b69fecdf5bf9651d4cd315ea21d24a964a3c36", + "sha256:5535dda5739257effef56e49a1c51c71f1d37a6e5607bb25a5eee507c59580d1", + "sha256:5a7524042014642b39b1fcae85fb37556c200e64ec90824ae9ecf7b667ccfc14", + "sha256:5f55028169ef85e1fa8e4b8b1b91c0b3b0fa3297c4fb22990d46ff01d22c2d6c", + "sha256:6694d5573e7790a0e8d3d177d7a416ca5f5c150742ee703f3c18df76260de794", + "sha256:6831e1ac20ac52634da606b658b0b2712d26984999c9d93f0c6e59fe62ca741b", + 
"sha256:77f0d9fa5e10d03aa4528436e33423bfa3718b86c646615f04616294c935f840", + "sha256:828ad813c7cdc2e71dcf141912c685bfe4b548c0e6d9540db6418b807c345ddd", + "sha256:85a06c61598b14b015d4df233d249cd5abfa61084ef5b9f64a48e997fd829a82", + "sha256:8cb4febad0f0b26c6f62e1628f2053954ad2c555d67660f28dfb1b0496711952", + "sha256:a5c58664b23b248b16b96253880b2868fb34358911400a7ba39d7f6399935389", + "sha256:aaa0f296e503cda4bc07566f592cd7a28779d433f3a23c48082af425d6d5a78f", + "sha256:ab235d9fe64833f12d1334d29b558aacedfbca2356dfb9691f2d0d38a8a7bfb4", + "sha256:b3b0c8f660fae65eac74fbf003f3103769b90012ae7a460863010539bb7a80da", + "sha256:bab8e6d510d2ea0f1d14f12642e3f35cefa47a9b2e4c7cea1852b52bc9c49647", + "sha256:c45297bbdbc8bb79b02cf41417d63352b70bcb76f1bbb1ee7d47b3e89e42f95d", + "sha256:d19bca47c8a01b92640c614a9147b081a1974f69168ecd494687c827109e8f42", + "sha256:d64b4340a0c488a9e79b66ec9f9d77d02b99b772c8b8afd46c1294c1d39ca478", + "sha256:da969da069a82bbb5300b59161d8d7c8d423bc4ccd3b410a9b4d8932aeefc14b", + "sha256:ed02c7539705696ecb7dc9d476d861f3904a8d2b7e894bd418994920935d36bb", + "sha256:ee5b8abc35b549012e03a7b1e86c09491457dba6c94112a2482b18589cc2bdb9" + ], + "version": "==4.5.2" }, "decorator": { "hashes": [ @@ -981,15 +957,13 @@ }, "faker": { "hashes": [ - "sha256:2621643b80a10b91999925cfd20f64d2b36f20bf22136bbdc749bb57d6ffe124", - "sha256:5ed822d31bd2d6edf10944d176d30dc9c886afdd381eefb7ba8b7aad86171646" + "sha256:c61a41d0dab8865b850bd00454fb11e90f3fd2a092d8bc90120d1e1c01cff906", + "sha256:f909ff9133ce0625ca388b6838190630ad7a593f87eaf058d872338a76241d5d" ], - "version": "==0.9.2" + "version": "==1.0.0" }, "fancycompleter": { "hashes": [ - "sha256:a19cfe876af0626bd2ae153db25a418396c4d2d0a56b35465dca07e0666a83d6", - "sha256:a99aee30ee655a010315e6b9c5e3d32ee671a7a5e1c2ed56b798ba264dbc06b8", "sha256:d2522f1f3512371f295379c4c0d1962de06762eb586c199620a2a5d423539b12" ], "version": "==0.8" @@ -1079,43 +1053,43 @@ }, "multidict": { "hashes": [ - 
"sha256:05eeab69bf2b0664644c62bd92fabb045163e5b8d4376a31dfb52ce0210ced7b", - "sha256:0c85880efa7cadb18e3b5eef0aa075dc9c0a3064cbbaef2e20be264b9cf47a64", - "sha256:136f5a4a6a4adeacc4dc820b8b22f0a378fb74f326e259c54d1817639d1d40a0", - "sha256:14906ad3347c7d03e9101749b16611cf2028547716d0840838d3c5e2b3b0f2d3", - "sha256:1ade4a3b71b1bf9e90c5f3d034a87fe4949c087ef1f6cd727fdd766fe8bbd121", - "sha256:22939a00a511a59f9ecc0158b8db728afef57975ce3782b3a265a319d05b9b12", - "sha256:2b86b02d872bc5ba5b3a4530f6a7ba0b541458ab4f7c1429a12ac326231203f7", - "sha256:3c11e92c3dfc321014e22fb442bc9eb70e01af30d6ce442026b0c35723448c66", - "sha256:4ba3bd26f282b201fdbce351f1c5d17ceb224cbedb73d6e96e6ce391b354aacc", - "sha256:4c6e78d042e93751f60672989efbd6a6bc54213ed7ff695fff82784bbb9ea035", - "sha256:4d80d1901b89cc935a6cf5b9fd89df66565272722fe2e5473168927a9937e0ca", - "sha256:4fcf71d33178a00cc34a57b29f5dab1734b9ce0f1c97fb34666deefac6f92037", - "sha256:52f7670b41d4b4d97866ebc38121de8bcb9813128b7c4942b07794d08193c0ab", - "sha256:5368e2b7649a26b7253c6c9e53241248aab9da49099442f5be238fde436f18c9", - "sha256:5bb65fbb48999044938f0c0508e929b14a9b8bf4939d8263e9ea6691f7b54663", - "sha256:60672bb5577472800fcca1ac9dae232d1461db9f20f055184be8ce54b0052572", - "sha256:669e9be6d148fc0283f53e17dd140cde4dc7c87edac8319147edd5aa2a830771", - "sha256:6a0b7a804e8d1716aa2c72e73210b48be83d25ba9ec5cf52cf91122285707bb1", - "sha256:79034ea3da3cf2a815e3e52afdc1f6c1894468c98bdce5d2546fa2342585497f", - "sha256:79247feeef6abcc11137ad17922e865052f23447152059402fc320f99ff544bb", - "sha256:81671c2049e6bf42c7fd11a060f8bc58f58b7b3d6f3f951fc0b15e376a6a5a98", - "sha256:82ac4a5cb56cc9280d4ae52c2d2ebcd6e0668dd0f9ef17f0a9d7c82bd61e24fa", - "sha256:9436267dbbaa49dad18fbbb54f85386b0f5818d055e7b8e01d219661b6745279", - "sha256:94e4140bb1343115a1afd6d84ebf8fca5fb7bfb50e1c2cbd6f2fb5d3117ef102", - "sha256:a2cab366eae8a0ffe0813fd8e335cf0d6b9bb6c5227315f53bb457519b811537", - "sha256:a596019c3eafb1b0ae07db9f55a08578b43c79adb1fe1ab1fd818430ae59ee6f", 
- "sha256:e8848ae3cd6a784c29fae5055028bee9bffcc704d8bcad09bd46b42b44a833e2", - "sha256:e8a048bfd7d5a280f27527d11449a509ddedf08b58a09a24314828631c099306", - "sha256:f6dd28a0ac60e2426a6918f36f1b4e2620fc785a0de7654cd206ba842eee57fd" - ], - "version": "==4.4.2" + "sha256:0573239b5241a075b944b39bdf87fb6600e3a56ad5ca6d2ba9699d62de872309", + "sha256:085b1f55327b4c8c425ce96a7fdfd6a6a1e864444a90d2107f47de4c53b6edea", + "sha256:1ee14a2e7bef872ddac61450e6128aae21304b5165d21fd04681faa3261a7b2e", + "sha256:2c1791371a973d93facccc38adf9e9c14656bf85c2beddd48329134d139b6e7f", + "sha256:2cda0064cab0e2d46b02aeee9e218066993b40d4900b07d8360f54c80eed4c5c", + "sha256:3574eef3ceb983658c3c8bef0c1b3771a2dea338b3822a0c2bec03363f1dc8bb", + "sha256:3864b26cdb1c7454809ec12fb0998989b8832ebb8423aa69ff3a1ad82b9756f3", + "sha256:3e6f7161ea60795f33b21e91b5c9fa66a3dd416f949684ade8ba8a9b193f7e50", + "sha256:3fa7944194cc96319cbbd53a1e0fb6dfe1e437efb75117828c35ce5b30d9d0c9", + "sha256:470ddec7a3ae052862af73dd39a1b1c582a1ec397f8643f09658de56a0a84ac9", + "sha256:5cb1a5926fe898451688036b5e95863c6e75110c98810584695b2403cb04522f", + "sha256:754ed617ecb736261ee3595b4a5dc035bcb5e897ce0a0148252aa8cbc2e59e60", + "sha256:79879c5c0434840d6ac5246e53d22e18c7f5b87419abb968e6357ba65386993a", + "sha256:7db4a72fa35bbe9ccaf3c856b14d89e26e8de5ca0c31604ac5970a3746182f5a", + "sha256:839676a86dbadf4a0be32ca580292c764245044eae324ebfc55362c13886d5d6", + "sha256:8805d8eec8437f9d2b3fd5c09822ef259af08ece0a19f41d2ad8d322a7a67054", + "sha256:8cbb4725aad6dc38cca571dab803f53ed76fc5cc468088636b42b539719aa5f9", + "sha256:9137d7e3db47641aa86526b60dd3d50d2066617668f8617f0c16adf92dfbaa9e", + "sha256:a6d985c3ccc1fca18cfd415406047f0984e3b07f533d50aa91c36eadb46681a9", + "sha256:af381425a02e0a235b23a685cda2d94cd0679ed8257d8a54c5f03ff2eee1fbb7", + "sha256:b7399dfd7f977c419d6e2b08d1099afe00f51454eb2ecc6b067c9eba6efb0a34", + "sha256:b8204fe2cb7199ecd568e67268a49d87f031c294e46d6fdf17bd1e544bcb81ba", + 
"sha256:ba973fd954f3de8e47e4be43f530729dd7e894615d3734a1b23f4de14f883142", + "sha256:c00b1423d09a73c94553f80ea52dc8c4341beae448bd4468a603263040debb17", + "sha256:ca4fab7f9590b7fa6c5dcde16356726f254456a2bb33d98828d896ba57a5eda1", + "sha256:dcb97bf0efa02a067f2a42c457dfa1548d8bc8913c12f96e26460709bc8a2ae5", + "sha256:e7d1f2671bd62064da2c7d6318c4f9307889cb85c59e00b2d1a66c2ed3bae3eb", + "sha256:ed8a1c22cbf6b0840e8b8a436bc378164a0474580968f38a0eeec8ed7cb78b75", + "sha256:f3826e28328455f62e8de193fb4ab5349ad78da693f1e002fd90d249a0cfaa8b" + ], + "version": "==4.5.0" }, "nodeenv": { "hashes": [ - "sha256:aa040ab5189bae17d272175609010be6c5b589ec4b8dbd832cc50c9e9cb7496f" + "sha256:ad8259494cf1c9034539f6cced78a1da4840a4b157e23640bc4a0c0546b0cb7a" ], - "version": "==1.3.2" + "version": "==1.3.3" }, "parso": { "hashes": [ @@ -1126,8 +1100,6 @@ }, "pdbpp": { "hashes": [ - "sha256:9ddd907997ce8a5c3ae708b1ad71aa95b3ad7010156b5df8fba82eace91a8032", - "sha256:b3a73da7dfe9bba6dfd584889ea9e3854823253134420898b1a58bf9b29ba9d6", "sha256:dde77326e4ea41439c243ed065826d53539530eeabd1b6615aae15cfbb9fda05" ], "index": "pypi", @@ -1208,11 +1180,11 @@ }, "pytest": { "hashes": [ - "sha256:a9e5e8d7ab9d5b0747f37740276eb362e6a76275d76cebbb52c6049d93b475db", - "sha256:bf47e8ed20d03764f963f0070ff1c8fda6e2671fc5dd562a4d3b7148ad60f5ca" + "sha256:488c842647bbeb350029da10325cb40af0a9c7a2fdda45aeb1dda75b60048ffb", + "sha256:c055690dfefa744992f563e8c3a654089a6aa5b8092dded9b6fafbd70b2e45a7" ], "index": "pypi", - "version": "==3.9.3" + "version": "==4.0.0" }, "pytest-cov": { "hashes": [ @@ -1238,11 +1210,11 @@ }, "pytest-django": { "hashes": [ - "sha256:49e9ffc856bc6a1bec1c26c5c7b7213dff7cc8bc6b64d624c4d143d04aff0bcf", - "sha256:b379282feaf89069cb790775ab6bbbd2bd2038a68c7ef9b84a41898e0b551081" + "sha256:deffd9d65827c582bd0a85638a0fe52f0eb65a764872ddcee9ce51cdf6ae9f55", + "sha256:fe1f71a0171f6b7edac37654da0904c9bd5ffba5221ab5a76779ab870611f41f" ], "index": "pypi", - "version": "==3.4.3" + "version": "==3.4.4" 
}, "pytest-echo": { "hashes": [ @@ -1290,10 +1262,10 @@ }, "requests": { "hashes": [ - "sha256:99dcfdaaeb17caf6e526f32b6a7b780461512ab3f1d992187801694cba42770c", - "sha256:a84b8c9ab6239b578f22d1c21d51b696dcfe004032bb80ea832398d6909d7279" + "sha256:65b3a120e4329e33c9889db89c80976c5272f56ea92d3e74da8a463992e3ff54", + "sha256:ea881206e59f41dbd0bd445437d792e43906703fff75ca8ff43ccdb11f33f263" ], - "version": "==2.20.0" + "version": "==2.20.1" }, "six": { "hashes": [ @@ -1361,10 +1333,10 @@ }, "webob": { "hashes": [ - "sha256:6e231a2952604efd41fd8f76803fe50e1904c81e4619eeb9d9c6991517627721", - "sha256:b0853dad347ca3777755b6d0659bb45efbeea71f995d8a395291ef6ad5d4f8b2" + "sha256:a48315158db05df0c47fbdd061b57ba0ba85bdd0b6ea9dca87511b4b7c798e99", + "sha256:fc8c466af474e2e2775f1aef7afb902ed8b82e597eb0b13624818a34e8bfe720" ], - "version": "==1.8.3" + "version": "==1.8.4" }, "websocket-client": { "hashes": [ @@ -1382,16 +1354,13 @@ }, "wmctrl": { "hashes": [ - "sha256:08d52e128c6007ab58b0eee337e2b5335adb0597ff6239d929965abe9ce33069", - "sha256:57d4f2eaebad81483eba50f4bc3c7688ef65f4957be6f6ad90b3105621cc6cbc", "sha256:d806f65ac1554366b6e31d29d7be2e8893996c0acbb2824bbf2b1f49cf628a13" ], "version": "==0.3" }, "wrapt": { "hashes": [ - "sha256:d4d560d479f2c21e1b5443bbd15fe7ec4b37fe7e53d335d3b9b0a7b1226fe3c6", - "sha256:fd8dddec1ecb60f787d43b88296c8439db4871299ecbf9b6f3968bfadf378342" + "sha256:d4d560d479f2c21e1b5443bbd15fe7ec4b37fe7e53d335d3b9b0a7b1226fe3c6" ], "version": "==1.10.11" }, diff --git a/README.md b/README.md index ca2e678ce..3b780aced 100644 --- a/README.md +++ b/README.md @@ -3,9 +3,9 @@ eTools DataMart [![CircleCI](https://circleci.com/gh/unicef/etools-datamart/tree/develop.svg?style=svg&circle-token=)](https://circleci.com/gh/unicef/etools-datamart/tree/develop) [![Codacy 
Badge](https://api.codacy.com/project/badge/Grade/819135a936894e678066e895604fd24f)](https://www.codacy.com/app/UNICEF/etools-datamart?utm_source=github.com&utm_medium=referral&utm_content=unicef/etools-datamart&utm_campaign=Badge_Grade) +[![codecov](https://codecov.io/gh/unicef/etools-datamart/branch/develop/graph/badge.svg)](https://codecov.io/gh/unicef/etools-datamart) [![](https://images.microbadger.com/badges/version/unicef/datamart.svg)](https://hub.docker.com/r/unicef/datamart/) - UNICEF eTools API and Datamart diff --git a/db/update_etools_schema.sh b/db/update_etools_schema.sh index b1a5d73a9..7d1497f7b 100755 --- a/db/update_etools_schema.sh +++ b/db/update_etools_schema.sh @@ -16,6 +16,7 @@ export PGHOST=127.0.0.1 export PGPORT=5432 export DATABASE_NAME=etools export DATABASE_USER=postgres +export DATABASE_PASS= export BASE_SCHEMA=kenya help (){ @@ -30,6 +31,11 @@ help (){ echo " -ni,--no-inspect do not inspect schema" echo " -ns,--no-summary do not display summary infos" echo " -nc,--no-clean do not clean temporary files" + echo " --host database host" + echo " --port database port" + echo " --db-name database name" + echo " --db-user database username" + echo " --db-pass database password" echo " -h,--help this help screen" exit 1 } @@ -57,6 +63,31 @@ case $1 in shift shift ;; + --host) + shift + PGHOST=$1 + shift + ;; + --port) + shift + PGPORT=$1 + shift + ;; + --db-name) + shift + DATABASE_NAME=$1 + shift + ;; + --db-user) + shift + DATABASE_USER=$1 + shift + ;; + --db-pass) + shift + DATABASE_PASS=$1 + shift + ;; -nr|--no-restore) RESTORE=0 shift @@ -101,6 +132,7 @@ echo "create tests data files $DUMP" echo "move testing data files $MOVE" echo "inspect db and update ORM $INSPECT" echo "clean temporary files $CLEAN" +echo "Connection http://${DATABASE_USER}:${DATABASE_PASS}@${PGHOST}:${PGPORT}/${DATABASE_NAME}" echo echo "Running..." 
# 1 - restore from database dump diff --git a/docker-compose.yml b/docker-compose.yml deleted file mode 100644 index 0d34500a4..000000000 --- a/docker-compose.yml +++ /dev/null @@ -1,89 +0,0 @@ -version: '2' - -services: - datamart: - # to use this docker file with local code, set env var TAG=dev - build: - context: . - dockerfile: ./docker/$DOCKERFILE - args: - - DEVELOP=1 - image: unicef/datamart:dev - volumes: - - $PWD:/code - environment: - CACHE_URL: redis://redis:6379/1 - CELERY_BROKER_URL: redis://redis:6379/2 - CELERY_RESULT_BACKEND: redis://redis:6379/3 - DATABASE_URL: postgres://postgres:@db:5432/etools_datamart - DATABASE_URL_ETOOLS: postgis://postgres:@etools:5432/etools - SUPERVISOR_USER: admin - SUPERVISOR_PWD: "" - DEBUG: "true" - STATIC_ROOT: /tmp/static - SECURE_SSL_REDIRECT: "false" - SESSION_COOKIE_SECURE: "false" - CSRF_COOKIE_SECURE: "false" - DEVELOPMENT_MODE: "false" - STACK: "0" - command: datamart - restart: "no" - ports: - - "8000:8000" - - "15000:15000" - depends_on: - - etools - - db - - redis - links: - - db:db - - etools:etools - - redis:redis - - db: - image: mdillon/postgis:9.5 - environment: - POSTGRES_PASSWORD: - POSTGRES_DB: etools_datamart - - etools: -# this image should be created running `make build` into `./db` directory - image: unicef/etools-db:dev - volumes: - - /data/storage/etools-db/data:/var/lib/postgresql/data - - redis: - image: redis:alpine - - - celery: - image: unicef/datamart:$TAG - ports: - - 5555:5555 - - 25000:15000 - environment: - CACHE_URL: redis://redis:6379/1 - CELERY_BROKER_URL: redis://redis:6379/2 - CELERY_RESULT_BACKEND: redis://redis:6379/3 - DATABASE_URL: postgres://postgres:@db:5432/etools_datamart - DATABASE_URL_ETOOLS: postgis://postgres:@etools:5432/etools - SUPERVISOR_USER: $SUPERVISOR_USER - SUPERVISOR_PWD: $SUPERVISOR_PWD - FLOWER_USER: $FLOWER_USER - FLOWER_PWD: $FLOWER_PWD - DEBUG: "true" - SECURE_SSL_REDIRECT: "false" - SESSION_COOKIE_SECURE: "false" - CSRF_COOKIE_SECURE: "false" - 
DEVELOPMENT_MODE: "false" - STACK: "0" - command: celery - depends_on: - - redis - - db - - etools - - uniset: - image: amancevice/superset - volumes: - - $PWD/docker:/etc/superset/ diff --git a/docker/Dockerfile b/docker/Dockerfile index a53c45a94..85e7d9744 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -13,14 +13,29 @@ RUN set -o pipefail && if [ "${DEVELOP}" = "1" ]; then \ && curl ${GITHUB_CREDENTIALS}: -L "https://github.com/unicef/etools-datamart/archive/${VERSION}.tar.gz" | tar -xzf - --strip-components=1; \ fi -FROM python:3.6.4 +FROM python:3.6.7-alpine as base COPY --from=builder /code /code -LABEL org.label-schema.name="" \ - org.label-schema.description="" \ - org.label-schema.url="" \ - org.label-schema.vcs-url="https://github.com/unicef/etools-datamart" \ - org.label-schema.version=$VERSION \ - org.label-schema.schema-version="1.0" +RUN apk add --no-cache --virtual .build-deps \ + gcc \ + libffi-dev \ + linux-headers \ + musl-dev \ + postgresql-dev \ + python3-dev + + +RUN apk add --no-cache --virtual .deps \ + bash + +FROM base + +LABEL org.label.name="eTools Datamart" \ + org.label.maintainer="sapostolico@unicef.org" \ + org.label.description="" \ + org.label.url="https://datamart.unicef.io/" \ + org.label.vcs-url="https://github.com/unicef/etools-datamart" \ + org.label.version=$VERSION + ARG BUILD_DATE ARG PIPENV_PYPI_MIRROR @@ -30,6 +45,7 @@ ARG VERSION ENV VERSION ${VERSION} ENV PIPENV_PYPI_MIRROR ${PIPENV_PYPI_MIRROR} +ENV PIPENV_ARGS ${PIPENV_ARGS} ENV HOME /root/ ENV PIPSI_HOME=/usr/local/pipsi/environments ENV PIPSI_BIN_DIR=/usr/local/bin @@ -63,20 +79,17 @@ ENV SENTRY_DSN "" ENV SESSION_COOKIE_HTTPONLY True ENV SESSION_COOKIE_SECURE True ENV STATIC_ROOT /tmp/static -ENV SUPERVISOR_USER admin -ENV SUPERVISOR_PWD "" -ENV FLOWER_USER admin -ENV FLOWER_PWD "" -ENV X_FRAME_OPTIONS "DENY" -ENV START_DATAMART "true" -ENV START_REDIS "true" -ENV START_CELERY "true" - -RUN apt-get update && apt-get install -y --force-yes \ - curl \ - 
supervisor \ - redis-server \ - gcc +#ENV SUPERVISOR_USER admin +#ENV SUPERVISOR_PWD "" +#ENV FLOWER_USER admin +#ENV FLOWER_PWD "" +#ENV X_FRAME_OPTIONS "DENY" +#ENV START_DATAMART "true" +#ENV START_REDIS "true" +#ENV START_CELERY "true" + +#RUN apt-get update && apt-get install -y --force-yes \ +# gcc RUN mkdir -p \ /var/datamart/{static,log,conf,run} \ @@ -86,25 +99,23 @@ WORKDIR /code RUN set -ex \ ls -al /code \ - && pipenv install --system --deploy --ignore-pipfile $PIPENV_ARGS \ - && pip install . \ - && rm -fr /code + && pipenv install --system --deploy --ignore-pipfile $PIPENV_ARGS -RUN apt-get autoremove --yes --force-yes \ - gcc \ - && rm -fr /var/lib/apt/lists/* \ - && rm -fr /var/cache/apt/* +RUN pip install . \ + && rm -fr /code +#RUN apt-get autoremove --yes --force-yes \ +# gcc \ +# && rm -fr /var/lib/apt/lists/* \ +# && rm -fr /var/cache/apt/* -ADD docker/redis.conf /etc/redis.conf -ADD docker/supervisord.conf /etc/supervisord.conf +#RUN apk del .build-deps \ +# && rm -rf /var/cache/apk/* \ +# && rm -fr /root/.cache/ WORKDIR /var/datamart EXPOSE 8000 -EXPOSE 15000 -EXPOSE 5555 -EXPOSE 6379 ADD docker/entrypoint.sh /usr/local/bin/docker-entrypoint.sh ENTRYPOINT ["docker-entrypoint.sh"] diff --git a/docker/Dockerfile.dev b/docker/Dockerfile.dev deleted file mode 100644 index 9fbfacb61..000000000 --- a/docker/Dockerfile.dev +++ /dev/null @@ -1,17 +0,0 @@ -FROM unicef/datamart:latest - -WORKDIR /code -ADD . /code - -RUN apt-get update && apt-get install -y \ - curl \ - supervisor \ - redis-server \ - gcc - -RUN pipenv install -v --system --ignore-pipfile --deploy --pypi-mirror $PYPI_INDEX \ - && pip uninstall -y etools-datamart \ - && pip install -e . 
- -ADD docker/entrypoint.sh /usr/local/bin/docker-entrypoint.sh -ENTRYPOINT ["docker-entrypoint.sh"] diff --git a/docker/Makefile b/docker/Makefile index 42f13edca..f8e32a82a 100644 --- a/docker/Makefile +++ b/docker/Makefile @@ -1,14 +1,21 @@ +# envs to set before use this script +DATABASE_URL?= +DATABASE_URL_ETOOLS?= +DEVELOP?=0 +DOCKER_PASS?=${DOCKER_PASS} +DOCKER_USER?=${DOCKER_USER} +TARGET?=dev +# below var are used internally BUILD_OPTIONS?= CMD?=datamart -TARGET?=dev -DEVELOP?=0 CONTAINER_NAME?=datamart-${TARGET} -DATABASE_URL?=postgres://postgres:@192.168.66.66:5432/etools_datamart -DATABASE_URL_ETOOLS?=postgis://postgres:@192.168.66.66:15432/etools -DOCKER_IMAGE_NAME=unicef/datamart +ORGANIZATION=unicef +IMAGE_NAME=datamart +DOCKER_IMAGE_NAME=${ORGANIZATION}/${IMAGE_NAME} DOCKER_IMAGE=${DOCKER_IMAGE_NAME}:${TARGET} DOCKERFILE?=Dockerfile RUN_OPTIONS?= +PIPENV_ARGS?= help: @echo "dev build dev image (based on local code)" @@ -18,7 +25,8 @@ help: build: - cd .. && docker build ${BUILD_OPTIONS} \ + cd .. 
&& docker build \ + ${BUILD_OPTIONS} \ --build-arg DEVELOP=${DEVELOP} \ --build-arg GITHUB_CREDENTIALS=${GITHUB_CREDENTIALS} \ --build-arg VERSION=${TARGET} \ @@ -31,8 +39,6 @@ build: --rm \ --name=${CONTAINER_NAME} \ -p 8000:8000 \ - -p 5555:5555 \ - -p 15000:15000 \ -e CACHE_URL=redis://127.0.0.1:6379/1 \ -e CELERY_BROKER_URL=redis://127.0.0.1:6379/2 \ -e CELERY_RESULT_BACKEND=redis://127.0.0.1:6379/3 \ @@ -88,3 +94,7 @@ scratch: shell: # docker exec -it ${CONTAINER_NAME} /bin/bash RUN_OPTIONS=-it CMD='/bin/bash' $(MAKE) .run + +docker-remove: + docker-remove.sh ${IMAGE_NAME} -o ${ORGANIZATION} + diff --git a/docker/README.md b/docker/README.md index c2fa3337e..dc09ae30e 100644 --- a/docker/README.md +++ b/docker/README.md @@ -10,13 +10,11 @@ To build docker image simply cd in `docker` directory and run default settings are for production ready environment, check `run` target in the `Makefile` to see how to run the container with debug/less secure configuration -Image launches following components: +Image provides following services: - - supervisord - - datamart (API/GUI/ETL) - - redis server - - celery (workers/beat) - - flower + - datamart + - celery workers + - celery beat to configure which services should be started, set `SERVICES` appropriately, ie: diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh index 42fa1f345..8c16ad6ca 100755 --- a/docker/entrypoint.sh +++ b/docker/entrypoint.sh @@ -1,41 +1,22 @@ #!/bin/bash -e set -e -mkdir -p /var/datamart/{static,log,conf,run,redis} +mkdir -p /var/datamart/{static,log,conf,run} rm -f /var/datamart/run/* -if [ "$@" == "celery" ];then - export START_REDIS="false" - export START_DATAMART="false" - export START_CELERY="true" - +if [ "$*" == "workers" ];then django-admin db-isready --wait --timeout 60 --sleep 5 django-admin db-isready --wait --timeout 300 --sleep 5 --connection etools - - cd /var/datamart - - exec supervisord --nodaemon -elif [ "$@" == "datamart" ];then - - django-admin db-isready --wait 
--timeout 60 - django-admin check --deploy - django-admin init-setup --all --verbosity 1 - django-admin db-isready --wait --timeout 300 --connection etools - - if [ "$DEVELOPMENT_MODE" == "1" ];then - python /code/manage.py runserver 0.0.0.0:8000 - else - gunicorn -b 0.0.0.0:8000 etools_datamart.config.wsgi - fi -elif [ "$@" == "stack" ];then + celery worker -A etools_datamart --loglevel=DEBUG --concurrency=4 --purge --pidfile run/celery.pid +elif [ "$*" == "beat" ];then + celery beat -A etools_datamart.celery --loglevel=DEBUG --pidfile run/celerybeat.pid +elif [ "$*" == "datamart" ];then django-admin db-isready --wait --timeout 60 django-admin check --deploy django-admin init-setup --all --verbosity 1 django-admin db-isready --wait --timeout 300 --connection etools - - cd /var/datamart - exec supervisord --nodaemon + gunicorn -b 0.0.0.0:8000 etools_datamart.config.wsgi else exec "$@" fi diff --git a/docker/redis.conf b/docker/redis.conf deleted file mode 100644 index 4fec095ec..000000000 --- a/docker/redis.conf +++ /dev/null @@ -1,20 +0,0 @@ -port 6379 -bind 127.0.0.1 -loglevel notice - -#logfile /var/log/redis.log - -databases 16 -daemonize no -dbfilename redis.rdb - -save 900 1 -save 300 10 -save 60 10000 - -dir /var/datamart/redis -pidfile /var/run/redis.pid - -#requirepass 123 - -rename-command CONFIG b840fc02d524045429941cc15f59e41cb7be6c52 diff --git a/docker/supervisord.conf b/docker/supervisord.conf deleted file mode 100644 index 5fcc2b897..000000000 --- a/docker/supervisord.conf +++ /dev/null @@ -1,95 +0,0 @@ -[supervisorctl] -serverurl=unix:///tmp/supervisor.sock ; use a unix:// URL for a unix socket - -[unix_http_server] -file=/tmp/supervisor.sock ; (the path to the socket file) - -[rpcinterface:supervisor] -supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface - -[inet_http_server] ; inet (TCP) server disabled by default -port=*:15000 ; (ip_address:port specifier, *:port for all iface) -username=%(ENV_SUPERVISOR_USER)s ; 
(default is no username (open server)) -password=%(ENV_SUPERVISOR_PWD)s ; (default is no password (open server)) - - -[supervisord] -childlogdir=log ; where child log files will live -directory=/var/datamart -environment=DJANGO_SETTINGS_MODULE="etools_datamart.config.settings" -logfile=log/supervisord.log ; (main log file;default $CWD/supervisord.log) -logfile_backups=1 -logfile_maxbytes=512Kb -loglevel=error ; (log level;default info; others: debug,warn,trace) -minfds=1024 ; (min. avail startup file descriptors;default 1024) -minprocs=200 ; (min. avail process descriptors;default 200) -nodaemon=false ; (start in foreground if true;default false) -pidfile=run/supervisord.pid ; (supervisord pidfile;default supervisord.pid) - -[program:datamart] -directory=/var/datamart -command=gunicorn -b 0.0.0.0:8000 etools_datamart.config.wsgi -autostart=%(ENV_START_DATAMART)s -autorestart=true -stderr_logfile=log/err.log -stdout_logfile=out.log - -[program:redis] -command=redis-server /etc/redis.conf -autostart=%(ENV_START_REDIS)s -autorestart=false -redirect_stderr=true -stdout_logfile=log/redis.log -logfile_maxbytes=512Kb -priority=200 - - -;[program:redmon] -;command=redmon -;autostart=true -;autorestart=false -;redirect_stderr=true -;stdout_logfile=log/redmon.log -;priority=790 - - -[program:workers] -command=celery worker -A etools_datamart --loglevel=DEBUG --concurrency=4 --purge --pidfile run/celery.pid -autostart=%(ENV_START_CELERY)s -autorestart=false -numprocs=1 -redirect_stderr=true -#stdout_logfile=celery.log -logfile_maxbytes=512Kb -priority=700 -startretries=3 -startsecs=10 -stopwaitsecs=600 -; Need to wait for currently executing tasks to finish at shutdown. -; Increase this if you have very long running tasks. 
- - -[program:beat] -command=celery beat -A etools_datamart --loglevel=DEBUG --pidfile run/celerybeat.pid -autostart=%(ENV_START_CELERY)s -autorestart=false -numprocs=1 -redirect_stderr=true -#stdout_logfile=celery-beat.log -logfile_maxbytes=512Kb -priority=700 -startretries=3 -startsecs=10 -stopwaitsecs=600 -; Need to wait for currently executing tasks to finish at shutdown. -; Increase this if you have very long running tasks. - - -[program:flower] -command=celery flower -A etools_datamart --port=5555 --loglevel=DEBUG --inspect --basic_auth=%(ENV_FLOWER_USER)s:%(ENV_FLOWER_PWD)s -autostart=%(ENV_START_CELERY)s -numprocs=1 -autorestart=false -redirect_stderr=true -#stdout_logfile=flower.log -priority=600 diff --git a/src/etools_datamart/__init__.py b/src/etools_datamart/__init__.py index 8ff6ef56b..b925ae7f5 100644 --- a/src/etools_datamart/__init__.py +++ b/src/etools_datamart/__init__.py @@ -1,3 +1,3 @@ NAME = 'etools-datamart' -VERSION = __version__ = '1.5' +VERSION = __version__ = '1.6' __author__ = '' diff --git a/src/etools_datamart/api/cache.py b/src/etools_datamart/api/cache.py deleted file mode 100644 index af767f847..000000000 --- a/src/etools_datamart/api/cache.py +++ /dev/null @@ -1,100 +0,0 @@ -from django.core.cache import caches -from django.utils.http import quote_etag -from rest_framework_extensions.cache.decorators import CacheResponse -from rest_framework_extensions.etag.decorators import ETAGProcessor -from rest_framework_extensions.key_constructor import bits -from rest_framework_extensions.key_constructor.bits import KeyBitBase -from rest_framework_extensions.key_constructor.constructors import KeyConstructor -from rest_framework_extensions.settings import extensions_api_settings -from unicef_rest_framework.cache import parse_ttl - -from etools_datamart.state import state - - -class CacheVersionKeyBit(KeyBitBase): - def get_data(self, params, view_instance, view_method, request, args, kwargs): - version = 
view_instance.get_service().cache_version - state.set('cache-version', version) - return {'cache_version': str(version)} - - -class ListKeyConstructor(KeyConstructor): - cache_version = CacheVersionKeyBit() - # system_filter = SystemFilterKeyBit() - - unique_method_id = bits.UniqueMethodIdKeyBit() - format = bits.FormatKeyBit() - headers = bits.HeadersKeyBit(['Accept']) - # language = bits.LanguageKeyBit() - list_sql_query = bits.ListSqlQueryKeyBit() - querystring = bits.QueryParamsKeyBit() - pagination = bits.PaginationKeyBit() - - def get_key(self, view_instance, view_method, request, args, kwargs): - key = super().get_key(view_instance, view_method, request, args, kwargs) - state.set('cache-key', key) - return key - - -class APIETAGProcessor(ETAGProcessor): - def is_if_none_match_failed(self, res_etag, etags, if_none_match): - if res_etag and if_none_match: - return quote_etag(res_etag) in etags or '*' in etags - else: - return False - - -class APICacheResponse(CacheResponse): - def __init__(self, - timeout=None, - key_func=None, - cache=None, - cache_errors=None): - self.cache_name = cache or extensions_api_settings.DEFAULT_USE_CACHE - super(APICacheResponse, self).__init__(timeout=timeout, key_func=key_func, - cache=cache, cache_errors=cache_errors) - - # @staticmethod - # def get_cache(name): - # from django.core.cache import caches - # return caches[name] - - def process_cache_response(self, - view_instance, - view_method, - request, - args, - kwargs): - key = self.calculate_key( - view_instance=view_instance, - view_method=view_method, - request=request, - args=args, - kwargs=kwargs - ) - cache = caches[self.cache_name] - response = cache.get(key) - if not response: - state.set('cache-hit', False) - response = view_method(view_instance, request, *args, **kwargs) - response = view_instance.finalize_response(request, response, *args, **kwargs) - response.render() # should be rendered, before picklining while storing to cache - - if not response.status_code >= 
400 or self.cache_errors: # pragma: no cover - cache.set(key, response, parse_ttl(view_instance.get_service().cache_ttl or '1y')) - else: - state.set('cache-hit', True) - request._request.service = view_instance.get_service() - request._request.viewset = view_instance - # state.set('service', view_instance.get_service().name) - # state.set('viewset', fqn(view_instance)) - state.set('cache-ttl', view_instance.get_service().cache_ttl) - - if not hasattr(response, '_closable_objects'): # pragma: no cover - response._closable_objects = [] - - return response - - -etag = APIETAGProcessor -cache_response = APICacheResponse diff --git a/src/etools_datamart/api/endpoints/common.py b/src/etools_datamart/api/endpoints/common.py index 0f5fce262..664aafba6 100644 --- a/src/etools_datamart/api/endpoints/common.py +++ b/src/etools_datamart/api/endpoints/common.py @@ -2,27 +2,20 @@ import coreapi import coreschema -import rest_framework_extensions.utils from django.core.exceptions import ObjectDoesNotExist, ValidationError from django.db import connections from django.http import Http404 from drf_querystringfilter.exceptions import QueryFilterException -from drf_renderer_xlsx.renderers import XLSXRenderer from dynamic_serializer.core import DynamicSerializerMixin from rest_framework.exceptions import NotAuthenticated, PermissionDenied from rest_framework.filters import OrderingFilter -from rest_framework.renderers import JSONRenderer from rest_framework.response import Response from unicef_rest_framework.filtering import SystemFilterBackend from unicef_rest_framework.views import ReadOnlyModelViewSet -from etools_datamart.api.cache import cache_response, etag, ListKeyConstructor from etools_datamart.api.filtering import DatamartQueryStringFilterBackend, TenantQueryStringFilterBackend -from etools_datamart.api.renderers import CSVRenderer from etools_datamart.apps.multitenant.exceptions import InvalidSchema, NotAuthorizedSchema -from ..renderers import APIBrowsableAPIRenderer - 
__all__ = ['APIMultiTenantReadOnlyModelViewSet'] @@ -51,17 +44,6 @@ def build_description(self): class APIReadOnlyModelViewSet(ReadOnlyModelViewSet): - object_cache_key_func = rest_framework_extensions.utils.default_object_cache_key_func - list_cache_key_func = ListKeyConstructor() - - object_etag_func = rest_framework_extensions.utils.default_object_etag_func - list_etag_func = ListKeyConstructor() - - renderer_classes = [JSONRenderer, - APIBrowsableAPIRenderer, - CSVRenderer, - XLSXRenderer, - ] filter_backends = [SystemFilterBackend, DatamartQueryStringFilterBackend, OrderingFilter] @@ -118,16 +100,6 @@ def get_object(self): return super().get_object() - @etag(etag_func='object_etag_func') - @cache_response(key_func='object_cache_key_func', cache='api') - def retrieve(self, request, *args, **kwargs): - return super(APIReadOnlyModelViewSet, self).retrieve(request, *args, **kwargs) - - @etag(etag_func='list_etag_func') - @cache_response(key_func='list_cache_key_func', cache='api') - def list(self, request, *args, **kwargs): - return super(APIReadOnlyModelViewSet, self).list(request, *args, **kwargs) - def one_schema(func): @wraps(func) diff --git a/src/etools_datamart/api/endpoints/datamart/__init__.py b/src/etools_datamart/api/endpoints/datamart/__init__.py index 1a34dddf0..ac2b0e94b 100644 --- a/src/etools_datamart/api/endpoints/datamart/__init__.py +++ b/src/etools_datamart/api/endpoints/datamart/__init__.py @@ -3,3 +3,4 @@ from .intervention import InterventionViewSet # noqa from .famindicator import FAMIndicatorViewSet # noqa from .user import UserStatsViewSet # noqa +from .hact import HACTViewSet # noqa diff --git a/src/etools_datamart/api/endpoints/datamart/hact.py b/src/etools_datamart/api/endpoints/datamart/hact.py new file mode 100644 index 000000000..8cbebff2a --- /dev/null +++ b/src/etools_datamart/api/endpoints/datamart/hact.py @@ -0,0 +1,11 @@ +# -*- coding: utf-8 -*- +from etools_datamart.apps.data import models + +from . import serializers +from .. 
import common + + +class HACTViewSet(common.APIReadOnlyModelViewSet): + serializer_class = serializers.HACTSerializer + queryset = models.HACT.objects.all() + filter_fields = ('country_name', 'month') diff --git a/src/etools_datamart/api/endpoints/datamart/serializers.py b/src/etools_datamart/api/endpoints/datamart/serializers.py index cf6fe0b17..4b7539a54 100644 --- a/src/etools_datamart/api/endpoints/datamart/serializers.py +++ b/src/etools_datamart/api/endpoints/datamart/serializers.py @@ -34,3 +34,10 @@ class Meta: def get_month(self, obj): return datetime.strftime(obj.month._date, '%b %Y') + + +class HACTSerializer(serializers.ModelSerializer): + + class Meta: + model = models.HACT + exclude = () diff --git a/src/etools_datamart/api/endpoints/datamart/user.py b/src/etools_datamart/api/endpoints/datamart/user.py index 4f53847e3..569d037c1 100644 --- a/src/etools_datamart/api/endpoints/datamart/user.py +++ b/src/etools_datamart/api/endpoints/datamart/user.py @@ -9,6 +9,3 @@ class UserStatsViewSet(common.APIReadOnlyModelViewSet): serializer_class = serializers.UserStatsSerializer queryset = models.UserStats.objects.all() filter_fields = ('country_name', 'month') - - def drf_ignore_filter(self, request, field): - return super().drf_ignore_filter(request, field) diff --git a/src/etools_datamart/api/endpoints/system/serializers.py b/src/etools_datamart/api/endpoints/system/serializers.py index edd771573..d6736fc30 100644 --- a/src/etools_datamart/api/endpoints/system/serializers.py +++ b/src/etools_datamart/api/endpoints/system/serializers.py @@ -6,5 +6,5 @@ class TaskLogSerializer(serializers.ModelSerializer): class Meta: - model = models.TaskLog + model = models.EtlTask exclude = ('content_type', 'id') diff --git a/src/etools_datamart/api/endpoints/system/task_log.py b/src/etools_datamart/api/endpoints/system/task_log.py index 3e041b6e5..25306b949 100644 --- a/src/etools_datamart/api/endpoints/system/task_log.py +++ 
b/src/etools_datamart/api/endpoints/system/task_log.py @@ -11,6 +11,6 @@ class TaskLogViewSet(common.APIReadOnlyModelViewSet): """ pagination_class = None serializer_class = serializers.TaskLogSerializer - queryset = models.TaskLog.objects.all() + queryset = models.EtlTask.objects.all() filter_fields = ('task', 'table_name', 'result', 'last_success', 'last_failure') diff --git a/src/etools_datamart/api/filtering.py b/src/etools_datamart/api/filtering.py index 4ade0d55e..51c9164de 100644 --- a/src/etools_datamart/api/filtering.py +++ b/src/etools_datamart/api/filtering.py @@ -7,7 +7,8 @@ from etools_datamart.apps.etools.utils import get_etools_allowed_schemas, validate_schemas from etools_datamart.apps.multitenant.exceptions import NotAuthorizedSchema -from etools_datamart.state import state + +# from unicef_rest_framework.state import state months = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', @@ -22,7 +23,7 @@ def process_country_name(self, efilters, eexclude, field, value, request, if not value: if not request.user.is_superuser: allowed = get_etools_allowed_schemas(request.user) - if not allowed: + if not allowed: # pragma: no cover raise PermissionDenied("You don't have enabled schemas") filters['country_name__iregex'] = r'(' + '|'.join(allowed) + ')' else: @@ -61,10 +62,12 @@ def process_month(self, filters, exclude, field, value, **payload): elif value == 'current': m = datetime.now().month y = datetime.now().year + else: # pragma: no cover + raise InvalidQueryValueError('month', value) filters['month__month'] = int(m) filters['month__year'] = int(y) - except ValueError: + except ValueError: # pragma: no cover raise InvalidQueryValueError('month', value) return filters, exclude @@ -73,8 +76,10 @@ class SetHeaderMixin: # must be the first one def filter_queryset(self, request, queryset, view): ret = super().filter_queryset(request, queryset, view) - state.set('filters', self.filters) - state.set('excludes', self.exclude) + request._filters = self.filters + 
request._exclude = self.exclude + # state.set('filters', self.filters) + # state.set('excludes', self.exclude) return ret diff --git a/src/etools_datamart/api/metadata.py b/src/etools_datamart/api/metadata.py index d01572d53..ef1e46caa 100644 --- a/src/etools_datamart/api/metadata.py +++ b/src/etools_datamart/api/metadata.py @@ -1,12 +1,79 @@ from rest_framework.metadata import SimpleMetadata +def get_create_model(editor, model): # pragma: no cover + """ + Create a table and any accompanying indexes or unique constraints for + the given `model`. + """ + # Create column SQL, add FK deferreds if needed + column_sqls = [] + params = [] + for field in model._meta.local_fields: + # SQL + definition, extra_params = editor.column_sql(model, field) + if definition is None: + continue + # Check constraints can go on the column SQL here + db_params = field.db_parameters(connection=editor.connection) + if db_params['check']: + definition += " CHECK (%s)" % db_params['check'] + # Autoincrement SQL (for backends with inline variant) + col_type_suffix = field.db_type_suffix(connection=editor.connection) + if col_type_suffix: + definition += " %s" % col_type_suffix + params.extend(extra_params) + # FK + if field.remote_field and field.db_constraint: + to_table = field.remote_field.model._meta.db_table + to_column = field.remote_field.model._meta.get_field(field.remote_field.field_name).column + if editor.sql_create_inline_fk: + definition += " " + editor.sql_create_inline_fk % { + "to_table": editor.quote_name(to_table), + "to_column": editor.quote_name(to_column), + } + elif editor.connection.features.supports_foreign_keys: + editor.deferred_sql.append(editor._create_fk_sql(model, field, "_fk_%(to_table)s_%(to_column)s")) + # Add the SQL to our big list + column_sqls.append("%s %s" % ( + editor.quote_name(field.column), + definition, + )) + # Autoincrement SQL (for backends with post table definition variant) + if field.get_internal_type() in ("AutoField", "BigAutoField"): + 
autoinc_sql = editor.connection.ops.autoinc_sql(model._meta.db_table, field.column) + if autoinc_sql: + editor.deferred_sql.extend(autoinc_sql) + + # Add any unique_togethers (always deferred, as some fields might be + # created afterwards, like geometry fields with some backends) + for fields in model._meta.unique_together: + columns = [model._meta.get_field(field).column for field in fields] + editor.deferred_sql.append(editor._create_unique_sql(model, columns)) + # Make the table + sql = editor.sql_create_table % { + "table": editor.quote_name(model._meta.db_table), + "definition": ", ".join(column_sqls) + } + if model._meta.db_tablespace: + tablespace_sql = editor.connection.ops.tablespace_sql(model._meta.db_tablespace) + if tablespace_sql: + sql += ' ' + tablespace_sql + return sql + + class SimpleMetadataWithFilters(SimpleMetadata): """Override SimpleMetadata, adding info about filters""" def determine_metadata(self, request, view): metadata = super(SimpleMetadataWithFilters, self).determine_metadata(request, view) - metadata['filters'] = view.filter_fields - metadata['filter_blacklist'] = view.filter_blacklist - metadata['ordering'] = view.ordering_fields + metadata['filters'] = getattr(view, 'filter_fields') + metadata['filter_blacklist'] = getattr(view, 'filter_blacklist') + metadata['ordering'] = getattr(view, 'ordering_fields') + metadata['serializers'] = getattr(view, 'serializers_fieldsets') + # from django.db import connection + # with connection.schema_editor() as editor: + # sql = get_create_model(editor, view.queryset.model) + # metadata['sql'] = sql + return metadata diff --git a/src/etools_datamart/api/middleware.py b/src/etools_datamart/api/middleware.py deleted file mode 100644 index 4c892ced5..000000000 --- a/src/etools_datamart/api/middleware.py +++ /dev/null @@ -1,31 +0,0 @@ -# -*- coding: utf-8 -*- -import logging -import threading - -from etools_datamart.state import state - -logger = logging.getLogger(__name__) - -_thread_locals = 
threading.local() - - -class ApiMiddleware(object): - def __init__(self, get_response): - self.get_response = get_response - - def __call__(self, request): - # Code to be executed for each request before - # the view (and later middleware) are called. - state.request = request - response = self.get_response(request) - - # Code to be executed for each request/response after - # the view is called. - response['filters'] = state.get('filters') - response['excludes'] = state.get('excludes') - response['system-filters'] = getattr(state.request, '_system_filter', '') - response['cache-key'] = state.get('cache-key') - response['cache-hit'] = state.get('cache-hit') - response['cache-ttl'] = state.get('cache-ttl') - response['cache-version'] = state.get('cache-version') - return response diff --git a/src/etools_datamart/api/renderers.py b/src/etools_datamart/api/renderers.py index b782d81db..f866af2e8 100644 --- a/src/etools_datamart/api/renderers.py +++ b/src/etools_datamart/api/renderers.py @@ -1,32 +1,10 @@ # -*- coding: utf-8 -*- import logging -from rest_framework_csv import renderers as r from unicef_rest_framework.renderers import APIBrowsableAPIRenderer as _BrowsableAPIRenderer -from etools_datamart.state import state - logger = logging.getLogger(__name__) class APIBrowsableAPIRenderer(_BrowsableAPIRenderer): - def get_context(self, data, accepted_media_type, renderer_context): - ctx = super(APIBrowsableAPIRenderer, self).get_context(data, accepted_media_type, renderer_context) - # in the real flow, this is added by the MultiTenant Middleware - # but this function is called before the middleware system is involved - - # ctx['response_headers']['X-Schema'] = ",".join(state.schemas) - ctx['response_headers']['cache-version'] = str(state.get('cache-version')) - ctx['response_headers']['cache-key'] = str(state.get('cache-key')) - ctx['response_headers']['system-filters'] = getattr(state.request, '_system_filter', '') - ctx['response_headers']['filters'] = 
state.get('filters', '') - ctx['response_headers']['excludes'] = state.get('excludes', '') - - return ctx - - -class CSVRenderer(r.CSVRenderer): - - def render(self, data, media_type=None, renderer_context={}, writer_opts=None): - data = dict(data)['results'] - return super().render(data, media_type, renderer_context, writer_opts) + pass diff --git a/src/etools_datamart/api/urls.py b/src/etools_datamart/api/urls.py index 41cfc6924..92cc86182 100644 --- a/src/etools_datamart/api/urls.py +++ b/src/etools_datamart/api/urls.py @@ -30,6 +30,7 @@ class ReadOnlyRouter(APIReadOnlyRouter): router.register(r'datamart/interventions', endpoints.InterventionViewSet) router.register(r'datamart/fam-indicators', endpoints.FAMIndicatorViewSet) router.register(r'datamart/user-stats', endpoints.UserStatsViewSet) +router.register(r'datamart/hact', endpoints.HACTViewSet) router.register(r'system/tasks-log', endpoints.TaskLogViewSet) diff --git a/src/etools_datamart/apps/core/apps.py b/src/etools_datamart/apps/core/apps.py index 8cf3d22e0..1962d12f7 100644 --- a/src/etools_datamart/apps/core/apps.py +++ b/src/etools_datamart/apps/core/apps.py @@ -1,8 +1,14 @@ -# import crashlog.middleware -# from django.apps import AppConfig -# from django.core.signals import got_request_exception -# -# -# class Config(AppConfig): -# def ready(self): -# got_request_exception.connect(crashlog.middleware.process_exception) +from django.apps import AppConfig + + +def invalidate_cache(sender, **kwargs): + for service in sender.linked_services: + service.invalidate_cache() + + +class Config(AppConfig): + name = 'etools_datamart.apps.core' + + def ready(self): + from etools_datamart.apps.etl.signals import data_refreshed + data_refreshed.connect(invalidate_cache) diff --git a/src/etools_datamart/apps/data/admin.py b/src/etools_datamart/apps/data/admin.py index 129a0e643..7b63661b4 100644 --- a/src/etools_datamart/apps/data/admin.py +++ b/src/etools_datamart/apps/data/admin.py @@ -14,8 +14,8 @@ from humanize 
import naturaldelta from unicef_rest_framework.models import Service -from etools_datamart.apps.etl.tasks import load_fam_indicator, load_intervention, load_pmp_indicator, load_user_report from etools_datamart.apps.multitenant.admin import SchemaFilter +from etools_datamart.config import settings from etools_datamart.libs.truncate import TruncateTableMixin from . import models @@ -29,9 +29,15 @@ class DatamartChangeList(ChangeList): class DataModelAdmin(ExtraUrlMixin, ModelAdmin): actions = None - load_handler = None + # load_handler = None list_filter = (SchemaFilter,) + def __init__(self, model, admin_site): + import etools_datamart.apps.etl.tasks.etl as mod + # we need to force celery task initialization + self.loaders = [v for v in mod.__dict__.values() if hasattr(v, 'apply_async')] + super().__init__(model, admin_site) + def get_changelist(self, request, **kwargs): return DatamartChangeList @@ -54,18 +60,30 @@ def changeform_view(self, request, object_id=None, form_url='', extra_context=No return HttpResponseRedirect(redirect_url) return self._changeform_view(request, object_id, form_url, extra_context) + @link() + def invalidate_cache(self, request): + for s in Service.objects.all(): + if s.managed_model == self.model: + s.invalidate_cache() + @link() def api(self, request): for s in Service.objects.all(): if s.managed_model == self.model: return HttpResponseRedirect(s.endpoint) - return "" + return "" # pragma: no cover @link() def queue(self, request): try: + start = time() self.model._etl_task.delay() - self.message_user(request, "ETL task scheduled", messages.SUCCESS) + if settings.CELERY_TASK_ALWAYS_EAGER: # pragma: no cover + stop = time() + duration = stop - start + self.message_user(request, "Data loaded in %s" % naturaldelta(duration), messages.SUCCESS) + else: + self.message_user(request, "ETL task scheduled", messages.SUCCESS) except Exception as e: # pragma: no cover self.message_user(request, str(e), messages.ERROR) finally: @@ -96,7 +114,7 @@ 
class PMPIndicatorsAdmin(DataModelAdmin, TruncateTableMixin): ) search_fields = ('partner_name',) date_hierarchy = 'pd_ssfa_creation_date' - load_handler = load_pmp_indicator + # load_handler = load_pmp_indicator @register(models.Intervention) @@ -109,14 +127,14 @@ class InterventionAdmin(DataModelAdmin, TruncateTableMixin): ) search_fields = ('number', 'title') date_hierarchy = 'start_date' - load_handler = load_intervention + # load_handler = load_intervention @register(models.FAMIndicator) class FAMIndicatorAdmin(DataModelAdmin): list_display = ('country_name', 'schema_name', 'month',) list_filter = (SchemaFilter, 'month',) - load_handler = load_fam_indicator + # load_handler = load_fam_indicator date_hierarchy = 'month' @@ -124,5 +142,16 @@ class FAMIndicatorAdmin(DataModelAdmin): class UserStatsAdmin(DataModelAdmin): list_display = ('country_name', 'schema_name', 'month', 'total', 'unicef', 'logins', 'unicef_logins') list_filter = (SchemaFilter, 'month') - load_handler = load_user_report + # load_handler = load_user_report date_hierarchy = 'month' + + +@register(models.HACT) +class HACTAdmin(DataModelAdmin): + list_display = ('country_name', 'schema_name', 'year', + 'microassessments_total', + 'programmaticvisits_total', + 'followup_spotcheck', 'completed_spotcheck', + 'completed_hact_audits', 'completed_special_audits') + list_filter = (SchemaFilter, 'year') + # load_handler = load_hact diff --git a/src/etools_datamart/apps/data/migrations/0003_hact.py b/src/etools_datamart/apps/data/migrations/0003_hact.py new file mode 100644 index 000000000..486edd3b9 --- /dev/null +++ b/src/etools_datamart/apps/data/migrations/0003_hact.py @@ -0,0 +1,33 @@ +# Generated by Django 2.1.3 on 2018-11-09 17:08 + +import unicef_security.models +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('data', '0002_intervention_partner_name'), + ] + + operations = [ + migrations.CreateModel( + name='HACT', + fields=[ + ('id', 
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('country_name', models.CharField(db_index=True, max_length=50)), + ('schema_name', models.CharField(db_index=True, max_length=50)), + ('year', models.IntegerField()), + ('microassessments_total', models.IntegerField(default=0, help_text='Total number of completed Microassessments in the business area in the past year')), + ('programmaticvisits_total', models.IntegerField(default=0, help_text='Total number of completed Programmatic visits in the business area')), + ('followup_spotcheck', models.IntegerField(default=0, help_text='Total number of completed Programmatic visits in the business area')), + ('completed_spotcheck', models.IntegerField(default=0, help_text='Total number of completed Programmatic visits in the business area')), + ('completed_hact_audits', models.IntegerField(default=0, help_text='Total number of completed scheduled audits for the workspace.')), + ('completed_special_audits', models.IntegerField(default=0, help_text='Total number of completed special audits for the workspace. 
')), + ], + options={ + 'abstract': False, + }, + bases=(models.Model, unicef_security.models.TimeStampedModel), + ), + ] diff --git a/src/etools_datamart/apps/data/models/__init__.py b/src/etools_datamart/apps/data/models/__init__.py index 5ca11acd3..1c4c1109f 100644 --- a/src/etools_datamart/apps/data/models/__init__.py +++ b/src/etools_datamart/apps/data/models/__init__.py @@ -2,3 +2,4 @@ from .pmp import PMPIndicators # noqa from .fam import FAMIndicator # noqa from .user import UserStats # noqa +from .hact import HACT # noqa diff --git a/src/etools_datamart/apps/data/models/base.py b/src/etools_datamart/apps/data/models/base.py index 86a05ba82..fd40300dc 100644 --- a/src/etools_datamart/apps/data/models/base.py +++ b/src/etools_datamart/apps/data/models/base.py @@ -1,3 +1,4 @@ +from celery.local import class_property from django.db import models from django.db.models import QuerySet from django.db.models.manager import BaseManager @@ -25,3 +26,8 @@ class Meta: abstract = True objects = DataMartManager() + + @class_property + def linked_services(self): + from unicef_rest_framework.models import Service + return [s for s in Service.objects.all() if s.managed_model == self] diff --git a/src/etools_datamart/apps/data/models/fam.py b/src/etools_datamart/apps/data/models/fam.py index 9710d41ad..11084fc49 100644 --- a/src/etools_datamart/apps/data/models/fam.py +++ b/src/etools_datamart/apps/data/models/fam.py @@ -26,3 +26,4 @@ class FAMIndicator(DataMartModel): class Meta: ordering = ('month', 'country_name') + unique_together = ('month', 'country_name') diff --git a/src/etools_datamart/apps/data/models/hact.py b/src/etools_datamart/apps/data/models/hact.py new file mode 100644 index 000000000..bf43fb2d6 --- /dev/null +++ b/src/etools_datamart/apps/data/models/hact.py @@ -0,0 +1,23 @@ +from django.db import models + +from etools_datamart.apps.data.models.base import DataMartModel + + +class HACT(DataMartModel): + year = models.IntegerField() + microassessments_total 
= models.IntegerField(default=0, + help_text="Total number of completed Microassessments in the business area in the past year") + programmaticvisits_total = models.IntegerField(default=0, + help_text="Total number of completed Programmatic visits in the business area") + followup_spotcheck = models.IntegerField(default=0, + help_text="Total number of completed Programmatic visits in the business area") + completed_spotcheck = models.IntegerField(default=0, + help_text="Total number of completed Programmatic visits in the business area") + completed_hact_audits = models.IntegerField(default=0, + help_text="Total number of completed scheduled audits for the workspace.") + completed_special_audits = models.IntegerField(default=0, + help_text="Total number of completed special audits for the workspace. ") + + class Meta: + ordering = ('year', 'country_name') + unique_together = ('year', 'country_name') diff --git a/src/etools_datamart/apps/data/models/user.py b/src/etools_datamart/apps/data/models/user.py index a005e28ed..7f94c8b5d 100644 --- a/src/etools_datamart/apps/data/models/user.py +++ b/src/etools_datamart/apps/data/models/user.py @@ -13,3 +13,4 @@ class UserStats(DataMartModel): class Meta: ordering = ('-month', 'country_name') + unique_together = ('country_name', 'month') diff --git a/src/etools_datamart/apps/etl/admin.py b/src/etools_datamart/apps/etl/admin.py index a72920e57..7be70cd58 100644 --- a/src/etools_datamart/apps/etl/admin.py +++ b/src/etools_datamart/apps/etl/admin.py @@ -5,23 +5,48 @@ from django.contrib.admin import register from django.http import HttpResponseRedirect from django.urls import reverse +from django.utils.html import format_html +from django_celery_beat.models import PeriodicTask from humanize import naturaldelta from etools_datamart.apps.etl.lock import cache +from etools_datamart.celery import app from etools_datamart.libs.truncate import TruncateTableMixin from . 
import models -@register(models.TaskLog) -class ExecutionAdmin(TruncateTableMixin, admin.ModelAdmin): +@register(models.EtlTask) +class EtlTaskAdmin(TruncateTableMixin, admin.ModelAdmin): list_display = ('task', 'timestamp', 'result', 'time', - 'last_success', 'last_failure', 'running') + 'last_success', 'last_failure', 'lock', 'scheduling', 'queue_task') + readonly_fields = ('task', 'timestamp', 'result', 'elapsed', 'time', 'last_success', 'last_failure', 'table_name', 'content_type') date_hierarchy = 'timestamp' actions = None + def scheduling(self, obj): + opts = PeriodicTask._meta + if obj.periodic_task: + pt = obj.periodic_task + url = reverse('admin:%s_%s_change' % (opts.app_label, + opts.model_name), args=[pt.id]) + url = f"{url}?name={obj.task}&task={obj.task}" + label = (pt.crontab or pt.solar or pt.interval) + else: + url = reverse('admin:%s_%s_add' % (opts.app_label, opts.model_name)) + label = 'Schedule' + + return format_html(f'{label}') + + def queue_task(self, obj): + opts = self.model._meta + url = reverse('admin:%s_%s_queue' % (opts.app_label, + opts.model_name), args=[obj.id]) + return format_html(f'queue') + queue_task.short_description = 'queue' + def has_add_permission(self, request): return False @@ -31,10 +56,10 @@ def has_delete_permission(self, request, obj=None): def time(self, obj): return naturaldelta(obj.elapsed) - def running(self, obj): + def lock(self, obj): return f"{obj.task}-lock" in cache - running.boolean = True + lock.boolean = True def changeform_view(self, request, object_id=None, form_url='', extra_context=None): if request.method == 'POST': @@ -43,6 +68,16 @@ def changeform_view(self, request, object_id=None, form_url='', extra_context=No return HttpResponseRedirect(redirect_url) return self._changeform_view(request, object_id, form_url, extra_context) + @action() + def queue(self, request, pk): + obj = self.get_object(request, pk) + try: + task = app.tasks[obj.task] + task.delay() + self.message_user(request, f"Task '{obj.task}' 
queued", messages.SUCCESS) + except Exception as e: # pragma: no cover + self.message_user(request, f"Cannot queue '{obj.task}': {e}", messages.ERROR) + @action() def unlock(self, request, pk): obj = self.get_object(request, pk) diff --git a/src/etools_datamart/apps/etl/apps.py b/src/etools_datamart/apps/etl/apps.py index b553d3c8d..a8a63ec7d 100644 --- a/src/etools_datamart/apps/etl/apps.py +++ b/src/etools_datamart/apps/etl/apps.py @@ -1,6 +1,16 @@ # -*- coding: utf-8 -*- +from celery.signals import task_postrun from django.apps import AppConfig +from etools_datamart.apps.etl.signals import data_refreshed + class Config(AppConfig): name = 'etools_datamart.apps.etl' + + +@task_postrun.connect +def task_postrun_handler(signal, sender, task_id, task, args, kwargs, retval, state, **kw): + if not hasattr(sender, 'linked_model'): + return + data_refreshed.send(sender.linked_model) diff --git a/src/etools_datamart/apps/etl/lock.py b/src/etools_datamart/apps/etl/lock.py index 224f3113d..9f09dbafd 100644 --- a/src/etools_datamart/apps/etl/lock.py +++ b/src/etools_datamart/apps/etl/lock.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- import logging -from functools import wraps +from functools import partial, wraps from django.core.cache import caches from redis.exceptions import LockError @@ -12,9 +12,21 @@ LOCK_EXPIRE = 60 * 60 # Lock expires in 1 hour +class TaskExecutionOverlap(Exception): + pass + + def only_one(function=None, key="", timeout=None): """Enforce only one celery task at a time.""" + def _unlock(key): + try: + lock = cache.lock(key, timeout=timeout) + cache.delete(key) + lock.release() + except LockError: + pass + def _dec(run_func): """Decorator.""" @@ -28,16 +40,17 @@ def _caller(*args, **kwargs): have_lock = lock.acquire(blocking=False) if have_lock: ret_value = run_func(*args, **kwargs) - + # else: + # raise TaskExecutionOverlap(key) finally: if have_lock: try: lock.release() - except LockError as e: + except LockError as e: # pragma: no cover 
logger.warning(e) return ret_value return _caller - + if function is not None: function.unlock = partial(_unlock, key) return _dec(function) if function is not None else _dec diff --git a/src/etools_datamart/apps/etl/migrations/0002_auto_20181119_2028.py b/src/etools_datamart/apps/etl/migrations/0002_auto_20181119_2028.py new file mode 100644 index 000000000..3306e10fd --- /dev/null +++ b/src/etools_datamart/apps/etl/migrations/0002_auto_20181119_2028.py @@ -0,0 +1,18 @@ +# Generated by Django 2.1.3 on 2018-11-19 20:28 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('contenttypes', '0002_remove_content_type_name'), + ('etl', '0001_initial'), + ] + + operations = [ + migrations.RenameModel( + old_name='TaskLog', + new_name='EtlTask', + ), + ] diff --git a/src/etools_datamart/apps/etl/models.py b/src/etools_datamart/apps/etl/models.py index ddcab28ac..501f3e250 100644 --- a/src/etools_datamart/apps/etl/models.py +++ b/src/etools_datamart/apps/etl/models.py @@ -2,6 +2,8 @@ from django.contrib.contenttypes.models import ContentType from django.db import models from django.db.models import Model +from django.utils.functional import cached_property +from django_celery_beat.models import PeriodicTask from etools_datamart.celery import app, ETLTask @@ -17,19 +19,22 @@ def get_for_task(self, task: ETLTask): table_name=task.linked_model._meta.db_table))[0] def inspect(self): - tasks = [cls for (name, cls) in app.tasks.items() if name.startswith('etl_')] + tasks = app.get_all_etls() results = {True: 0, False: 0} + new = [] for task in tasks: - __, created = self.get_or_create(task=task.name, - defaults=dict( - content_type=ContentType.objects.get_for_model(task.linked_model), - timestamp=None, - table_name=task.linked_model._meta.db_table)) + t, created = self.get_or_create(task=task.name, - defaults=dict( + defaults=dict( - content_type=ContentType.objects.get_for_model(task.linked_model), + content_type=ContentType.objects.get_for_model(task.linked_model), - timestamp=None, + timestamp=None, - table_name=task.linked_model._meta.db_table)) + table_name=task.linked_model._meta.db_table)) 
results[created] += 1 + new.append(t.id) + self.exclude(id__in=new).delete() return results[True], results[False] -class TaskLog(models.Model): +class EtlTask(models.Model): task = models.CharField(max_length=200, unique=True) timestamp = models.DateTimeField(null=True) result = models.CharField(max_length=200) @@ -41,5 +46,15 @@ class TaskLog(models.Model): objects = TaskLogManager() + class Meta: + get_latest_by = 'timestamp' + def __str__(self): return f"{self.task} {self.result}" + + @cached_property + def periodic_task(self): + try: + return PeriodicTask.objects.get(task=self.task) + except PeriodicTask.DoesNotExist: + pass diff --git a/src/etools_datamart/apps/etl/signals.py b/src/etools_datamart/apps/etl/signals.py new file mode 100644 index 000000000..e652eaea5 --- /dev/null +++ b/src/etools_datamart/apps/etl/signals.py @@ -0,0 +1,3 @@ +from django.db.models.signals import ModelSignal + +data_refreshed = ModelSignal(providing_args=["model"]) diff --git a/src/etools_datamart/apps/etl/tasks/etl.py b/src/etools_datamart/apps/etl/tasks/etl.py index 73b9adfef..cfa440876 100644 --- a/src/etools_datamart/apps/etl/tasks/etl.py +++ b/src/etools_datamart/apps/etl/tasks/etl.py @@ -1,22 +1,79 @@ # -*- coding: utf-8 -*- +import json import logging from datetime import date, datetime from django.db import connections from django.db.models import Sum from django.db.models.functions import Coalesce +from django.utils import timezone from strategy_field.utils import get_attr -from etools_datamart.apps.data.models import Intervention, PMPIndicators +from etools_datamart.apps.data.models import HACT, Intervention, PMPIndicators from etools_datamart.apps.data.models.fam import FAMIndicator from etools_datamart.apps.data.models.user import UserStats from etools_datamart.apps.etools.models import (AuditAudit, AuditEngagement, AuditMicroassessment, - AuditSpecialaudit, AuditSpotcheck, AuthUser, PartnersIntervention, - PartnersPartnerorganization,) + AuditSpecialaudit, 
AuditSpotcheck, AuthUser, HactAggregatehact, + PartnersIntervention, PartnersPartnerorganization,) from etools_datamart.celery import app logger = logging.getLogger(__name__) +__all__ = ["load_hact", "load_user_report", "load_fam_indicator", + "load_pmp_indicator", "load_intervention"] + + +@app.etl(HACT) +def load_hact(): + connection = connections['etools'] + countries = connection.get_tenants() + today = timezone.now() + created = {} + for country in countries: + created[country.name] = 0 + connection.set_schemas([country.schema_name]) + + logger.info(u'Running on %s' % country.name) + aggregate = HactAggregatehact.objects.get(year=today.year) + data = json.loads(aggregate.partner_values) + + # PartnersPartnerorganization.objects.hact_active() + # qs = PartnersPartnerorganization.objects.filter(Q(reported_cy__gt=0) | Q(total_ct_cy__gt=0), hidden=False) + # values = dict(microassessments_total=0, + # programmaticvisits_total=0, + # followup_spotcheck=0, + # completed_hact_audits=0, + # completed_special_audits=0, + # ) + # for partner in qs.all(): + # values['microassessments_total'] += AuditEngagement.objects.filter( + # engagement_type=AuditEngagement.TYPE_MICRO_ASSESSMENT, + # status=AuditEngagement.FINAL, date_of_draft_report_to_unicef__year=datetime.now().year).count() + # + # values['programmaticvisits_total'] += partner.hact_values['programmatic_visits']['completed']['total'] + # values['followup_spotcheck'] = qs.aggregate(total=Coalesce(Sum( + # 'planned_engagement__spot_check_follow_up'), 0))['total'] + # + # # completed_hact_audits = ? 
+ # values['completed_special_audits'] += AuditEngagement.objects.filter( + # engagement_type=AuditEngagement.TYPE_SPECIAL_AUDIT, + # status=AuditEngagement.FINAL, date_of_draft_report_to_unicef__year=datetime.now().year).count() + + # # Total number of completed Microassessments in the business area in the past year + values = dict(microassessments_total=data['assurance_activities']['micro_assessment'], + programmaticvisits_total=data['assurance_activities']['programmatic_visits']['completed'], + followup_spotcheck=data['assurance_activities']['spot_checks']['follow_up'], + completed_spotcheck=data['assurance_activities']['spot_checks']['completed'], + completed_hact_audits=data['assurance_activities']['scheduled_audit'], + completed_special_audits=data['assurance_activities']['special_audit'], + ) + HACT.objects.update_or_create(year=today.year, + country_name=country.name, + schema_name=country.schema_name, + defaults=values) + + return created + @app.etl(PMPIndicators) def load_pmp_indicator(): @@ -225,36 +282,3 @@ def load_user_report(): created[country.name] += 1 return created - - # start_date = kwargs.get('start_date', None) - # if start_date: - # start_date = datetime.strptime(start_date.pop(), '%Y-%m-%d') - # else: - # start_date = date.today() + relativedelta(months=-1) - # - # countries = kwargs.get('countries', None) - # qs = Country.objects.exclude(schema_name__in=['public', 'uat', 'frg']) - # if countries: - # qs = qs.filter(schema_name__in=countries.pop().split(',')) - # fieldnames = ['Country', 'Total Users', 'Unicef Users', 'Last month Users', 'Last month Unicef Users'] - # dict_writer = writer(fieldnames=fieldnames) - # dict_writer.writeheader() - # - # for country in qs: - # dict_writer.writerow({ - # 'Country': country, - # 'Total Users': get_user_model().objects.filter(profile__country=country).count(), - # 'Unicef Users': get_user_model().objects.filter( - # profile__country=country, - # email__endswith='@unicef.org' - # ).count(), - # 'Last 
month Users': get_user_model().objects.filter( - # profile__country=country, - # last_login__gte=start_date - # ).count(), - # 'Last month Unicef Users': get_user_model().objects.filter( - # profile__country=country, - # email__endswith='@unicef.org', - # last_login__gte=start_date - # ).count(), - # }) diff --git a/src/etools_datamart/apps/etools/admin.py b/src/etools_datamart/apps/etools/admin.py index 8a640996d..8969fe5de 100644 --- a/src/etools_datamart/apps/etools/admin.py +++ b/src/etools_datamart/apps/etools/admin.py @@ -96,3 +96,8 @@ class FundsReservationHeaderAdmin(TenantModelAdmin): @register(models.FundsFundsreservationitem) class FundsreservationitemAdmin(TenantModelAdmin): pass + + +@register(models.HactAggregatehact) +class HactAggregatehactAdmin(TenantModelAdmin): + list_display = ('schema', 'year',) diff --git a/src/etools_datamart/apps/etools/patch.py b/src/etools_datamart/apps/etools/patch.py index 6a8c432e9..768ce6495 100644 --- a/src/etools_datamart/apps/etools/patch.py +++ b/src/etools_datamart/apps/etools/patch.py @@ -7,6 +7,8 @@ from django.utils.translation import ugettext as _ from unicef_security.models import User +from etools_datamart.apps.etools.models import PartnersPlannedengagement + def label(attr, self): return getattr(self, attr) @@ -17,6 +19,12 @@ def create_alias(model, aliases): r = getattr(model, related) setattr(model, business_name, r) + # for related, business_name in aliases: + # opts = model._meta + # fld = opts.get_field(related) + # fld.related_name = business_name + # setattr(model, business_name, fld) + def patch(): from django.apps import apps @@ -24,6 +32,18 @@ def patch(): PartnersPartnerorganization, UsersCountry, UsersUserprofile, UsersUserprofileCountriesAvailable, ) + AuditEngagement.TYPE_AUDIT = 'audit' + AuditEngagement.TYPE_MICRO_ASSESSMENT = 'ma' + AuditEngagement.TYPE_SPOT_CHECK = 'sc' + AuditEngagement.TYPE_SPECIAL_AUDIT = 'sa' + + AuditEngagement.TYPES = ( + (AuditEngagement.TYPE_AUDIT, _('Audit')), + 
(AuditEngagement.TYPE_MICRO_ASSESSMENT, _('Micro Assessment')), + (AuditEngagement.TYPE_SPOT_CHECK, _('Spot Check')), + (AuditEngagement.TYPE_SPECIAL_AUDIT, _('Special Audit')), + ) + AuditEngagement.PARTNER_CONTACTED = 'partner_contacted' AuditEngagement.REPORT_SUBMITTED = 'report_submitted' AuditEngagement.FINAL = 'final' @@ -34,6 +54,7 @@ def patch(): (AuditEngagement.FINAL, _('Final Report')), (AuditEngagement.CANCELLED, _('Cancelled')), ) + # AuditEngagement._meta.fields['engagement_type'].choices = AuditEngagement.TYPES PartnersPartnerorganization.current_core_value_assessment = cached_property( lambda self: @@ -91,7 +112,7 @@ def patch(): AuthUser.set_password = User.set_password - AuthUser.profile = cached_property(lambda self: UsersUserprofile.objects.get(user_id=self.id)) + # AuthUser.profile = cached_property(lambda self: UsersUserprofile.objects.get(user_id=self.id)) # groups = models.ManyToManyField( # Group, @@ -125,9 +146,19 @@ def patch(): aliases = ( # CoreValuesAssessment.partner ['partnerspartnerorganization_partners_corevaluesassessment_partner_id', - 'core_values_assessments'],) + 'core_values_assessments'], + # PlannedEngagement + # ['partnerspartnerorganization_partners_plannedengagement_partner_id', + # 'planned_engagement'], + ) create_alias(PartnersPartnerorganization, aliases) + f = [f for f in PartnersPlannedengagement._meta.local_fields if f.name != 'partner'] + PartnersPlannedengagement._meta.local_fields = f + models.OneToOneField(PartnersPartnerorganization, + related_name='planned_engagement', + on_delete=models.PROTECT).contribute_to_class(PartnersPlannedengagement, 'partner') + aliases = (['partnersintervention_partners_interventionbudget_intervention_id', 'planned_budget'], ['partnersintervention_funds_fundsreservationheader_intervention_id', 'frs']) diff --git a/src/etools_datamart/apps/etools/utils.py b/src/etools_datamart/apps/etools/utils.py index bcdd2307c..036e6f23b 100644 --- a/src/etools_datamart/apps/etools/utils.py +++ 
b/src/etools_datamart/apps/etools/utils.py @@ -18,9 +18,9 @@ def get_etools_allowed_schemas(user): else: return set() - -def schema_is_valid(*schema): - return schema in conn.all_schemas +# +# def schema_is_valid(*schema): +# return schema in conn.all_schemas def validate_schemas(*schemas): diff --git a/src/etools_datamart/apps/init/management/commands/init-setup.py b/src/etools_datamart/apps/init/management/commands/init-setup.py index d483ad8bb..70ab9a837 100644 --- a/src/etools_datamart/apps/init/management/commands/init-setup.py +++ b/src/etools_datamart/apps/init/management/commands/init-setup.py @@ -1,6 +1,6 @@ import os -import sys import warnings +from urllib.parse import urlparse from django.conf import settings from django.contrib.auth import get_user_model @@ -11,10 +11,11 @@ from django.utils.module_loading import import_string from django_celery_beat.models import CrontabSchedule, PeriodicTask from humanize import naturaldelta +from redisboard.models import RedisServer from strategy_field.utils import fqn from unicef_rest_framework.models.acl import GroupAccessControl -from etools_datamart.apps.etl.models import TaskLog +from etools_datamart.apps.etl.models import EtlTask from etools_datamart.celery import app @@ -105,13 +106,20 @@ def handle(self, *args, **options): serializers=['*'], policy=GroupAccessControl.POLICY_ALLOW ) + # hostname + for entry, values in settings.CACHES.items(): + loc = values.get('LOCATION', '') + spec = urlparse(loc) + if spec.scheme == 'redis': + RedisServer.objects.get_or_create(hostname=spec.netloc, + port=int(spec.port)) if os.environ.get('AUTOCREATE_USERS'): self.stdout.write("Found 'AUTOCREATE_USERS' environment variable") self.stdout.write("Going to create new users") try: - for entry in os.environ.get('AUTOCREATE_USERS').split(','): - user, pwd = entry.split('|') + for entry in os.environ.get('AUTOCREATE_USERS').split('|'): + user, pwd = entry.split(',') User = get_user_model() u, created = 
User.objects.get_or_create(username=user) if created: @@ -119,16 +127,16 @@ def handle(self, *args, **options): u.set_password(pwd) u.save() u.groups.add(all_access) - else: + else: # pragma: no cover self.stdout.write(f"User {u} already exists.") - except Exception as e: + except Exception as e: # pragma: no cover warnings.warn(f"Unable to create default users. {e}") if options['tasks'] or _all or options['refresh']: midnight, __ = CrontabSchedule.objects.get_or_create(minute=0, hour=0) - tasks = [cls for (name, cls) in app.tasks.items() if name.startswith('etl_')] + tasks = app.get_all_etls() counters = {True: 0, False: 0} for task in tasks: __, is_new = PeriodicTask.objects.get_or_create(task=fqn(task), @@ -141,7 +149,7 @@ def handle(self, *args, **options): task.delete() counters[False] += 1 - TaskLog.objects.inspect() + EtlTask.objects.inspect() self.stdout.write( f"{PeriodicTask.objects.count()} tasks found. {counters[True]} new. {counters[False]} deleted") @@ -156,10 +164,6 @@ def handle(self, *args, **options): etl.delay() self.stdout.write(f"{task.name} scheduled") else: - ret = etl.apply() + etl.apply() cost = naturaldelta(app.timers[task.name]) - if isinstance(ret.result, Exception): # pragma: no cover - self.stderr.write(f"\n{ret.result}") - sys.exit(1) - - self.stdout.write(f"{task.name} created {sum(ret.result.values())} records in {cost}") + self.stdout.write(f"{task.name} excuted in {cost}") diff --git a/src/etools_datamart/apps/multitenant/management/commands/inspectschema.py b/src/etools_datamart/apps/multitenant/management/commands/inspectschema.py index 070b1037e..107be08d3 100644 --- a/src/etools_datamart/apps/multitenant/management/commands/inspectschema.py +++ b/src/etools_datamart/apps/multitenant/management/commands/inspectschema.py @@ -7,7 +7,7 @@ from django.db.models.constants import LOOKUP_SEP from django_regex.utils import RegexList -from etools_datamart import state +# from etools_datamart import state INGNORED_TABLES = RegexList([ # 
Both @@ -64,7 +64,7 @@ def strip_prefix(s): return s[1:] if s.startswith("u'") else s # connection.mode = SINGLE_TENANT - state.schemas = [schema, "public"] + # state.schemas = [schema, "public"] connection.schema_name = schema with connection.cursor() as cursor: # cursor.execute(raw_sql(f"SET search_path={schema}")) diff --git a/src/etools_datamart/apps/multitenant/postgresql/base.py b/src/etools_datamart/apps/multitenant/postgresql/base.py index 21e382b9c..33494a154 100644 --- a/src/etools_datamart/apps/multitenant/postgresql/base.py +++ b/src/etools_datamart/apps/multitenant/postgresql/base.py @@ -231,7 +231,7 @@ def set_all_schemas(self): # category=DeprecationWarning) # return self.tenant - def make_debug_cursor(self, cursor): + def make_debug_cursor(self, cursor): # pragma: no cover """Create a cursor that logs all queries in self.queries_log.""" return TenantDebugCursor(cursor, self) diff --git a/src/etools_datamart/apps/multitenant/postgresql/public.sqldump b/src/etools_datamart/apps/multitenant/postgresql/public.sqldump index 50d869345..0a761fec4 100644 Binary files a/src/etools_datamart/apps/multitenant/postgresql/public.sqldump and b/src/etools_datamart/apps/multitenant/postgresql/public.sqldump differ diff --git a/src/etools_datamart/apps/multitenant/postgresql/tenant.sql b/src/etools_datamart/apps/multitenant/postgresql/tenant.sql index 840ce32b9..5cfc76599 100644 --- a/src/etools_datamart/apps/multitenant/postgresql/tenant.sql +++ b/src/etools_datamart/apps/multitenant/postgresql/tenant.sql @@ -2,7 +2,7 @@ -- PostgreSQL database dump -- --- Dumped from database version 10.4 (Debian 10.4-2.pgdg90+1) +-- Dumped from database version 10.5 (Debian 10.5-2.pgdg90+1) -- Dumped by pg_dump version 10.4 SET statement_timeout = 0; @@ -65170,3 +65170,4 @@ ALTER TABLE ONLY [[schema]].unicef_snapshot_activity -- -- PostgreSQL database dump complete -- + diff --git a/src/etools_datamart/apps/multitenant/query.py b/src/etools_datamart/apps/multitenant/query.py index 
c9332cd3c..13735f4cc 100644 --- a/src/etools_datamart/apps/multitenant/query.py +++ b/src/etools_datamart/apps/multitenant/query.py @@ -48,7 +48,7 @@ class TenantRelatedPopulator(RelatedPopulator): # self.remote_setter = klass_info['remote_setter'] def populate(self, row, from_obj): - if self.reorder_for_init: + if self.reorder_for_init: # pragma: no cover obj_data = self.reorder_for_init(row) else: obj_data = row[self.cols_start:self.cols_end] @@ -59,7 +59,7 @@ def populate(self, row, from_obj): if 'schema' in init_list: init_list.remove('schema') obj = self.model_cls.from_db(self.db, init_list, obj_data) - if self.related_populators: + if self.related_populators: # pragma: no cover for rel_iter in self.related_populators: rel_iter.populate(row, obj) self.local_setter(from_obj, obj) @@ -100,7 +100,7 @@ def __iter__(self): if related_populators: for rel_populator in related_populators: rel_populator.populate(row, obj) - if annotation_col_map: + if annotation_col_map: # pragma: no cover for attr_name, col_pos in annotation_col_map.items(): setattr(obj, attr_name, row[col_pos]) diff --git a/src/etools_datamart/apps/multitenant/templates/schemafilter.html b/src/etools_datamart/apps/multitenant/templates/schemafilter.html index 2e7f2e210..ea6eea550 100644 --- a/src/etools_datamart/apps/multitenant/templates/schemafilter.html +++ b/src/etools_datamart/apps/multitenant/templates/schemafilter.html @@ -1,5 +1,4 @@ {% load i18n multitenant %} -{% get_state %} {% schemas %}

{% blocktrans with title as filter_title %} By {{ filter_title }} {% endblocktrans %}

diff --git a/src/etools_datamart/apps/multitenant/templatetags/multitenant.py b/src/etools_datamart/apps/multitenant/templatetags/multitenant.py index ced63afc8..12e651ed1 100644 --- a/src/etools_datamart/apps/multitenant/templatetags/multitenant.py +++ b/src/etools_datamart/apps/multitenant/templatetags/multitenant.py @@ -1,18 +1,17 @@ # -*- coding: utf-8 -*- from django import template from django.db import connections -from django.urls import reverse -from etools_datamart.state import state +# from unicef_rest_framework.state import state register = template.Library() - -@register.simple_tag(takes_context=True) -def select_schema(context): - url = reverse("select-schema") - request = context['request'] - return f"{url}?from={request.path}" +# +# @register.simple_tag(takes_context=True) +# def select_schema(context): +# url = reverse("select-schema") +# request = context['request'] +# return f"{url}?from={request.path}" @register.simple_tag(takes_context=True) @@ -21,9 +20,9 @@ def schemas(context): context['schemas'] = conn.schemas return "" - -@register.simple_tag(takes_context=True) -def get_state(context): - context['state'] = state - context['request'] = state.request - return "" +# +# @register.simple_tag(takes_context=True) +# def get_state(context): +# context['state'] = state +# context['request'] = state.request +# return "" diff --git a/src/etools_datamart/apps/multitenant/views.py b/src/etools_datamart/apps/multitenant/views.py index 0a749ed58..84602daab 100644 --- a/src/etools_datamart/apps/multitenant/views.py +++ b/src/etools_datamart/apps/multitenant/views.py @@ -7,7 +7,8 @@ from django.views.generic.edit import FormView from etools_datamart.apps.multitenant.forms import SchemasForm -from etools_datamart.state import state + +# from unicef_rest_framework.state import state logger = logging.getLogger(__name__) @@ -44,7 +45,7 @@ def form_valid(self, form): self.selected = ['_all'] response = HttpResponseRedirect(self.get_success_url()) - 
state.schemas = self.selected + # state.schemas = self.selected # response.set_cookie('schemas', ','.join(self.selected)) return response diff --git a/src/etools_datamart/apps/tracking/apps.py b/src/etools_datamart/apps/tracking/apps.py index 820450e09..236ce4c9d 100644 --- a/src/etools_datamart/apps/tracking/apps.py +++ b/src/etools_datamart/apps/tracking/apps.py @@ -3,4 +3,4 @@ class Config(AppConfig): name = 'etools_datamart.apps.tracking' - verbose_name = 'Logs' + verbose_name = 'Access Log & Counters' diff --git a/src/etools_datamart/apps/tracking/middleware.py b/src/etools_datamart/apps/tracking/middleware.py index e2e99379b..fd55dc265 100644 --- a/src/etools_datamart/apps/tracking/middleware.py +++ b/src/etools_datamart/apps/tracking/middleware.py @@ -7,14 +7,15 @@ from django.conf import settings from django.db.models import F from django.utils.timezone import now -from strategy_field.utils import fqn, get_attr +from strategy_field.utils import fqn from etools_datamart.apps.tracking import config -from etools_datamart.state import state -from .asyncqueue import AsyncQueue from .models import APIRequestLog, DailyCounter, MonthlyCounter, PathCounter, UserCounter +# from unicef_rest_framework.state import state + + logger = logging.getLogger(__name__) @@ -86,7 +87,6 @@ def record_to_kwargs(request, response): response_timedelta = now() - request.timestamp response_ms = int(response_timedelta.total_seconds() * 1000) response_length = len(response.content) - # get POST data try: data_dict = request.POST.dict() @@ -97,11 +97,11 @@ def record_to_kwargs(request, response): media_type = response.accepted_media_type except AttributeError: # pragma: no cover media_type = response['Content-Type'].split(';')[0] - viewset = getattr(request, 'viewset', None) - if not viewset: # pragma: no cover + view = request.api_info.get('view', None) + if not view: # pragma: no cover return {} - viewset = fqn(viewset) - service = get_attr(request, "service.name") + viewset = fqn(view) 
+ service = request.api_info.get("service") from unicef_rest_framework.utils import get_ident return dict(user=user, requested_at=request.timestamp, @@ -114,14 +114,14 @@ def record_to_kwargs(request, response): query_params=json.dumps(request.GET.dict()), data=data_dict, viewset=viewset, - service=service, - cached=state.get('cache-hit') or False, # see api.common.APICacheResponse + service=service.name, + cached=request.api_info.get('cache-hit', False), # see api.common.APICacheResponse content_type=media_type) - -class AsyncLogger(AsyncQueue): - def _process(self, record): - log_request(**record_to_kwargs(**record)) +# +# class AsyncLogger(AsyncQueue): +# def _process(self, record): +# log_request(**record_to_kwargs(**record)) class StatsMiddleware(object): @@ -140,15 +140,14 @@ def __call__(self, request): response = self.get_response(request) if response.status_code == 200 and config.TRACK_PATH.match(request.path): - if config.TRACK_ANONYMOUS or request.user.is_authenticated: - self.log(request, response) + self.log(request, response) return response - -class ThreadedStatsMiddleware(StatsMiddleware): - def __init__(self, get_response): - super(ThreadedStatsMiddleware, self).__init__(get_response) - self.worker = AsyncLogger() - - def log(self, request, response): - self.worker.queue({'request': request, 'response': response}) +# +# class ThreadedStatsMiddleware(StatsMiddleware): +# def __init__(self, get_response): +# super(ThreadedStatsMiddleware, self).__init__(get_response) +# self.worker = AsyncLogger() +# +# def log(self, request, response): +# self.worker.queue({'request': request, 'response': response}) diff --git a/src/etools_datamart/apps/web/templates/base.html b/src/etools_datamart/apps/web/templates/base.html index c4e351edc..cc2b9a4e7 100644 --- a/src/etools_datamart/apps/web/templates/base.html +++ b/src/etools_datamart/apps/web/templates/base.html @@ -51,6 +51,9 @@ {% endblock %} {% block footer %}
+
+ {{ user.label }} +
Version: {% version %}
diff --git a/src/etools_datamart/apps/web/templates/index.html b/src/etools_datamart/apps/web/templates/index.html index 0afb3be43..065f9d192 100644 --- a/src/etools_datamart/apps/web/templates/index.html +++ b/src/etools_datamart/apps/web/templates/index.html @@ -7,7 +7,7 @@

eTools Datamart

  • Login
  • {% else %}
  • API
  • -
  • API Doc
  • +
  • Documentation
  • Swagger
  • {% if request.user.is_staff %}
  • Admin
  • @@ -15,11 +15,13 @@

    eTools Datamart

    {% if request.user.is_superuser %}
  • System Info
  • -
  • Supervisor
  • -
  • Flower
  • +{#
  • Supervisor
  • #} +{#
  • Flower
  • #} {% endif %}
     
  • Logout
  • +
  • Disconnect
  • + {% endif %} {% endblock %} diff --git a/src/etools_datamart/apps/web/urls.py b/src/etools_datamart/apps/web/urls.py index aa42bf80e..4bd2a39b0 100644 --- a/src/etools_datamart/apps/web/urls.py +++ b/src/etools_datamart/apps/web/urls.py @@ -1,11 +1,12 @@ from django.contrib.auth.views import LoginView, LogoutView from django.urls import path -from .views import index +from .views import DisconnectView, index urlpatterns = [ path(r'', index, name='home'), path(r'login/', LoginView.as_view(template_name='login.html'), name='login'), path(r'logout/', LogoutView.as_view(next_page='/'), name='logout'), + path(r'disconnect/', DisconnectView.as_view(next_page='/'), name='disconnect'), ] diff --git a/src/etools_datamart/apps/web/views.py b/src/etools_datamart/apps/web/views.py index 50308e6e0..d019fab59 100644 --- a/src/etools_datamart/apps/web/views.py +++ b/src/etools_datamart/apps/web/views.py @@ -1,5 +1,13 @@ +from django.contrib.auth.views import LogoutView from django.template.response import TemplateResponse +from etools_datamart.config.settings import env + def index(request): return TemplateResponse(request, 'index.html') + + +class DisconnectView(LogoutView): + def get_next_page(self): # pragma: no cover + return env('DISCONNECT_URL') diff --git a/src/etools_datamart/celery.py b/src/etools_datamart/celery.py index dff6b8298..6779a8489 100644 --- a/src/etools_datamart/celery.py +++ b/src/etools_datamart/celery.py @@ -2,7 +2,6 @@ from time import time from celery import Celery -from celery.app.task import TaskType from celery.signals import task_postrun, task_prerun from celery.task import Task from django.utils import timezone @@ -12,19 +11,7 @@ os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'etools_datamart.config.settings') -# def get_task_log(name, model): -# if not name: -# return None -# from django.contrib.contenttypes.models import ContentType -# from etools_datamart.apps.etl.models import TaskLog -# -# return TaskLog.objects.get_or_create(task=name, 
-# defaults=dict(content_type=ContentType.objects.get_for_model(model), -# timestamp=None, -# table_name=model._meta.db_table))[0] -# - -class ETLTask(Task, metaclass=TaskType): +class ETLTask(Task): abstract = True linked_model = None @@ -36,12 +23,12 @@ class DatamartCelery(Celery): def _task_from_fun(self, fun, name=None, base=None, bind=False, **options): linked_model = options.get('linked_model', None) name = name or self.gen_task_name(fun.__name__, fun.__module__) - # options['task_log'] = SimpleLazyObject(lambda: get_task_log(name, options.get('linked_model'))) + options['lock_key'] = f"{name}-lock" + fun = only_one(fun, options['lock_key']) + options['unlock'] = fun.unlock - fun = only_one(fun, f"{name}-lock") task = super()._task_from_fun(fun, name=name, base=None, bind=False, **options) if linked_model: - # task.linked_model = linked_model linked_model._etl_task = task linked_model._etl_loader = fun @@ -53,20 +40,26 @@ def etl(self, model, *args, **opts): task = super().task(*args, **opts) return task - def gen_task_name(self, name, module): - prefix = "" - if module.endswith('.tasks.etl'): - module = module[:-10] - prefix = 'etl_' - if module.endswith('.tasks'): - module = module[:-6] - return prefix + super(DatamartCelery, self).gen_task_name(name, module) + def get_all_etls(self): + return [cls for (name, cls) in self.tasks.items() if hasattr(cls, 'linked_model')] + + # def gen_task_name(self, name, module): + # prefix = "" + # if module.endswith('.tasks.etl'): + # module = module[:-10] + # prefix = 'etl_' + # if module.endswith('.tasks'): + # module = module[:-6] + # return prefix + super(DatamartCelery, self).gen_task_name(name, module) app = DatamartCelery('datamart') app.config_from_object('django.conf:settings', namespace='CELERY') -app.autodiscover_tasks(related_name='tasks') -app.autodiscover_tasks(related_name='etl') +# app.autodiscover_tasks(lambda: [n.name for n in apps.get_app_configs()]) +# app.autodiscover_tasks(lambda: [n.name for n in 
apps.get_app_configs()], +# related_name='tasks') +# app.autodiscover_tasks(lambda: [n.name for n in apps.get_app_configs()], +# related_name='etl') app.timers = {} @@ -78,11 +71,11 @@ def task_prerun_handler(signal, sender, task_id, task, args, kwargs, **kw): app.timers[task_id] = time() from django.contrib.contenttypes.models import ContentType - from etools_datamart.apps.etl.models import TaskLog + from etools_datamart.apps.etl.models import EtlTask defs = {'result': 'RUNNING', 'timestamp': timezone.now()} - TaskLog.objects.update_or_create(task=task.name, + EtlTask.objects.update_or_create(task=task.name, content_type=ContentType.objects.get_for_model(task.linked_model), table_name=task.linked_model._meta.db_table, defaults=defs) @@ -103,7 +96,7 @@ def task_postrun_handler(signal, sender, task_id, task, args, kwargs, retval, st defs['last_success'] = timezone.now() else: defs['last_failure'] = timezone.now() - from etools_datamart.apps.etl.models import TaskLog + from etools_datamart.apps.etl.models import EtlTask - TaskLog.objects.update_or_create(task=task.name, defaults=defs) + EtlTask.objects.update_or_create(task=task.name, defaults=defs) app.timers[task.name] = cost diff --git a/src/etools_datamart/config/settings.py b/src/etools_datamart/config/settings.py index 859e19123..09989622e 100644 --- a/src/etools_datamart/config/settings.py +++ b/src/etools_datamart/config/settings.py @@ -15,13 +15,15 @@ ETOOLS_DUMP_LOCATION=(str, str(PACKAGE_DIR / 'apps' / 'multitenant' / 'postgresql')), CACHE_URL=(str, "redis://127.0.0.1:6379/1"), + # API_CACHE_URL=(str, "redis://127.0.0.1:6379/2"), API_CACHE_URL=(str, "locmemcache://"), # CACHE_URL=(str, "dummycache://"), # API_CACHE_URL=(str, "dummycache://"), - + DISCONNECT_URL=(str, 'https://login.microsoftonline.com/unicef.org/oauth2/logout'), ENABLE_LIVE_STATS=(bool, True), CELERY_BROKER_URL=(str, 'redis://127.0.0.1:6379/2'), CELERY_RESULT_BACKEND=(str, 'redis://127.0.0.1:6379/3'), + CELERY_ALWAYS_EAGER=(bool, False), 
CSRF_COOKIE_SECURE=(bool, True), DATABASE_URL=(str, "postgres://postgres:@127.0.0.1:5432/etools_datamart"), DATABASE_URL_ETOOLS=(str, "postgis://postgres:@127.0.0.1:15432/etools"), @@ -35,6 +37,7 @@ SECURE_FRAME_DENY=(bool, True), SESSION_COOKIE_SECURE=(bool, True), STATIC_ROOT=(str, '/tmp/static'), + STATIC_URL=(str, '/dm-static/'), X_FRAME_OPTIONS=(str, 'DENY'), AZURE_CLIENT_ID=(str, ''), @@ -55,10 +58,14 @@ ALLOWED_HOSTS = tuple(env.list('ALLOWED_HOSTS', default=[])) ADMINS = ( - ('', 'saxix@saxix.onmicrosoft.com'), - ('', 'sapostolico@unicef.org'), - ('', 'sapostolico@nikunicef.onmicrosoft.org'), - + ('Stefano', 'saxix@saxix.onmicrosoft.com'), + ('Stefano', 'sapostolico@unicef.org'), + ('Nik', 'ntrncic@unicef.org'), + ('Greg', 'greinbach@unicef.org'), + ('Zack', 'zadams@unicef.org'), + ('Robert', 'ravram@unicef.org'), + ('Domenico', 'ddinicola@unicef.org'), + ('Evan', 'ewheeler@unicef.org') ) DATABASES = { @@ -116,7 +123,7 @@ # URL that handles the media served from MEDIA_ROOT. Make sure to use a # trailing slash. # Examples: "http://media.lawrence.com/media/", "http://example.com/media/" -MEDIA_URL = '/media/' +MEDIA_URL = '/dm-media/' # Absolute path to the directory static files should be collected to. # Don't put anything in this directory yourself; store your static files @@ -126,7 +133,7 @@ # URL prefix for static files. 
# Example: "http://media.lawrence.com/static/" -STATIC_URL = '/static/' +STATIC_URL = env('STATIC_URL') # Additional locations of static files STATICFILES_DIRS = ( @@ -153,7 +160,7 @@ 'django.contrib.auth.middleware.AuthenticationMiddleware', # 'django.contrib.auth.middleware.RemoteUserMiddleware', 'crashlog.middleware.CrashLogMiddleware', - 'etools_datamart.api.middleware.ApiMiddleware', + 'unicef_rest_framework.middleware.ApiMiddleware', # 'etools_datamart.apps.tracking.middleware.ThreadedStatsMiddleware', 'etools_datamart.apps.tracking.middleware.StatsMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', @@ -248,6 +255,7 @@ 'social_django', 'rest_framework_social_oauth2', 'unicef_security', + 'redisboard', 'django_filters', 'month_field', 'drf_querystringfilter', @@ -260,7 +268,7 @@ 'django_celery_beat', - 'etools_datamart.apps.core', + 'etools_datamart.apps.core.apps.Config', 'etools_datamart.apps.etools', 'etools_datamart.apps.data', 'etools_datamart.apps.etl.apps.Config', @@ -329,7 +337,7 @@ CONSTANCE_CONFIG = { 'AZURE_USE_GRAPH': (True, 'Use MS Graph API to fetch user data', bool), - 'DEFAULT_GROUP': ('Guests', 'Use MS Graph API to fetch user data', 'select_group'), + 'DEFAULT_GROUP': ('Guests', 'Default group new users belong to', 'select_group'), } CELERY_BEAT_SCHEDULER = 'django_celery_beat.schedulers.DatabaseScheduler' @@ -339,7 +347,15 @@ CELERY_ACCEPT_CONTENT = ['application/json'] CELERY_RESULT_SERIALIZER = 'json' CELERY_TASK_SERIALIZER = 'json' +CELERY_TASK_IMPORTS = ["etools_datamart.apps.etl.tasks.etl", + "etools_datamart.apps.etl.tasks.tasks", ] CELERY_BEAT_SCHEDULE = {} +CELERY_TASK_ALWAYS_EAGER = env.bool('CELERY_ALWAYS_EAGER', False) +CELERY_EAGER_PROPAGATES_EXCEPTIONS = CELERY_TASK_ALWAYS_EAGER +CELERY_TASK_ROUTES = { + 'etools_datamart.apps.etl.tasks.etl': {'queue': 'etl'}, + 'etools_datamart.apps.etl.tasks.tasks': {'queue': 'tasks'}, +} CONCURRENCY_IGNORE_DEFAULT = False @@ -358,15 +374,6 @@ 'ORDERING_PARAM': 'ordering', } 
-AZURE_SSL = True -AZURE_URL_EXPIRATION_SECS = 10800 -AZURE_ACCESS_POLICY_EXPIRY = 10800 # length of time before signature expires in seconds -AZURE_ACCESS_POLICY_PERMISSION = 'r' -AZURE_TOKEN_URL = 'https://login.microsoftonline.com/saxix.onmicrosoft.com/oauth2/token' -AZURE_GRAPH_API_BASE_URL = 'https://graph.microsoft.com' -AZURE_GRAPH_API_VERSION = 'v1.0' -AZURE_GRAPH_API_PAGE_SIZE = 300 - JWT_AUTH = { 'JWT_VERIFY': False, # this requires private key 'JWT_VERIFY_EXPIRATION': True, @@ -519,7 +526,7 @@ 'propagate': False }, 'etools_datamart': { - 'handlers': ['console', 'db'], + 'handlers': ['null', 'db'], 'level': 'ERROR', 'propagate': False }, diff --git a/src/etools_datamart/libs/constance.py b/src/etools_datamart/libs/constance.py index daa9a8a2d..066a5905b 100644 --- a/src/etools_datamart/libs/constance.py +++ b/src/etools_datamart/libs/constance.py @@ -1,5 +1,8 @@ +from constance import config from django.contrib.auth.models import Group -from django.forms import ChoiceField, Select +from django.forms import ChoiceField, HiddenInput, Select, Textarea, TextInput +from django.template import Context, Template +from django.utils.safestring import mark_safe class GroupChoiceField(ChoiceField): @@ -12,3 +15,43 @@ def __init__(self, **kwargs): class GroupChoice(Select): pass + + +# class LabelInput(HiddenInput): +# +# def render(self, name, value, attrs=None, renderer=None): +# context = self.get_context(name, value, attrs) +# context['value'] = str(value) +# tpl = Template('{{ value }}') +# return mark_safe(tpl.render(Context(context))) +# + +class ObfuscatedInput(HiddenInput): + + def render(self, name, value, attrs=None, renderer=None): + context = self.get_context(name, value, attrs) + context['value'] = str(value) + context['label'] = "Set" if value else "Not Set" + + tpl = Template('{{ label }}') + return mark_safe(tpl.render(Context(context))) + + +class WriteOnlyWidget: + def format_value(self, value): + value = "***" + return super().format_value(value) 
+ + def value_from_datadict(self, data, files, name): + value = data.get(name) + if value == '***': + return getattr(config, name) + return value + + +class WriteOnlyTextarea(WriteOnlyWidget, Textarea): + pass + + +class WriteOnlyInput(WriteOnlyWidget, TextInput): + pass diff --git a/src/etools_datamart/state.py b/src/etools_datamart/state.py deleted file mode 100644 index 7b5dc2422..000000000 --- a/src/etools_datamart/state.py +++ /dev/null @@ -1,21 +0,0 @@ -# -*- coding: utf-8 -*- - -from threading import local - - -class State(local): - request = None - data = {} - - def clear(self): - self.data = {} - self.request = None - - def set(self, key, value): - self.data[key] = value - - def get(self, key, default=None): - return self.data.get(key, default) - - -state: State = State() diff --git a/src/unicef_rest_framework/auth.py b/src/unicef_rest_framework/auth.py index c7effee90..73fdd3690 100644 --- a/src/unicef_rest_framework/auth.py +++ b/src/unicef_rest_framework/auth.py @@ -2,7 +2,7 @@ from constance import config from crashlog.middleware import process_exception -from django.contrib.auth import get_user_model +from django.contrib.auth import get_user_model, login from django.utils.translation import ugettext as _ from rest_framework import exceptions from rest_framework.exceptions import AuthenticationFailed, PermissionDenied @@ -13,7 +13,7 @@ def jwt_get_username_from_payload(payload): - return payload.get('preferred_username') + return payload.get('preferred_username', payload.get('unique_name')) class JWTAuthentication(authentication.JSONWebTokenAuthentication): @@ -26,6 +26,8 @@ def authenticate(self, request): try: user, jwt_value = super(JWTAuthentication, self).authenticate(request) + request.user = user + login(request, user, 'social_core.backends.azuread_tenant.AzureADTenantOAuth2') except TypeError: # pragma: no cover raise PermissionDenied(detail='No valid authentication provided') except AuthenticationFailed as e: # pragma: no cover @@ -65,5 +67,7 
@@ def authenticate_credentials(self, payload): if not user.is_active: msg = _('User account is disabled.') raise exceptions.AuthenticationFailed(msg) - return user + + +# NOTE(review): REDACTED — a live Azure AD access token (long base64 blob) was committed here; it must be revoked and purged from git history. Also verify that deleting 'return user' above is intentional: DRF's authenticate_credentials() is expected to return the user. diff --git a/src/unicef_rest_framework/cache.py b/src/unicef_rest_framework/cache.py index dd130cbe9..e4144e0a8 100644 --- a/src/unicef_rest_framework/cache.py +++ b/src/unicef_rest_framework/cache.py @@ -1,9 +1,16 @@ import re from django.core.cache import caches +from django.utils.http import quote_etag from django.utils.translation import ugettext as _ from humanize.i18n import ngettext from humanize.time import date_and_delta +from rest_framework_extensions.cache.decorators import CacheResponse +from rest_framework_extensions.etag.decorators import ETAGProcessor +from rest_framework_extensions.key_constructor import bits +from rest_framework_extensions.key_constructor.bits import KeyBitBase +from rest_framework_extensions.key_constructor.constructors import KeyConstructor +from rest_framework_extensions.settings import extensions_api_settings cache = caches['default'] @@ -97,130 +104,88 @@ def humanize_ttl(value, months=True): # noqa else: return ngettext("%d year", "%d years", years) % years -# -# def
method_cache(ttl=0, cache_key=None): -# """ -# A `seconds` value of `0` means that we will not memcache it. -# -# If a result is cached on instance, return that first. If that fails, check -# memcached. If all else fails, hit the db and cache on instance and in memcache. -# -# ** NOTES: -# 1) Methods that return None are always "recached". -# 2) `instance` can either instance or class (if applied to a @classmethod) -# """ -# seconds = parse_ttl(ttl) -# -# def inner_cache(method): -# -# def x(instance, *args, **kwargs): -# key = cache_key or sha224("".join((str(id(instance)), -# str(method.__name__), -# str(args), -# str(kwargs)).hexdigest())) -# if hasattr(instance, key): -# # has on class cache, return that -# result = getattr(instance, key) -# else: -# result = cache.get(key) -# -# if result is None: -# # all caches failed, call the actual method -# result = method(instance, *args, **kwargs) -# -# # save to memcache and class attr -# if seconds and isinstance(seconds, int): -# cache.set(key, result, seconds) -# setattr(instance, key, result) -# return result -# -# return x -# -# return inner_cache -# -# -# def func_cache(ttl): -# """ -# A `seconds` value of `0` means that we will not memcache it. -# -# If a result is cached on instance, return that first. If that fails, check -# memcached. If all else fails, hit the db and cache on instance and in memcache. -# -# ** NOTE: Methods that return None are always "recached". 
-# """ -# seconds = parse_ttl(ttl) -# -# def inner_cache(method): -# @wraps(method) -# def x(*args, **kwargs): -# key = sha224(str(method.__module__) + str(method.__name__) + str(args) + str(kwargs)).hexdigest() -# result = cache.get(key) -# if result is None: -# # all caches failed, call the actual method -# result = method(*args, **kwargs) -# -# # save to memcache and class attr -# if seconds and isinstance(seconds, int): -# cache.set(key, result, seconds) -# return result -# -# return x -# -# return inner_cache -# -# -# def inline_cache(callable, seconds=0, key=None, *args, **kwargs): # pragma: no cover -# key = key or sha224(str(callable.__module__) + str(callable.__name__) + str(args) + str(kwargs)).hexdigest() -# -# def x(*args, **kwargs): -# result = cache.get(key) -# if result is None: -# # all caches failed, call the actual method -# result = callable(*args, **kwargs) -# -# # save to memcache and class attr -# if seconds and isinstance(seconds, int): -# cache.set(key, result, seconds) -# return result -# -# return x -# - -# backport of Python's 3.3 lru_cache, written by Raymond Hettinger and -# licensed under MIT license, from: -# -# Should be removed when Django only supports Python 3.2 and above. 
- -# -# _CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"]) -# -# -# class _HashedSeq(list): -# __slots__ = 'hashvalue' -# -# def __init__(self, tup, hash=hash): -# self[:] = tup -# self.hashvalue = hash(tup) -# -# def __hash__(self): -# return self.hashvalue -# -# -# def _make_key(args, kwds, typed, -# kwd_mark=(object(),), -# fasttypes={int, str, frozenset, type(None)}, -# sorted=sorted, tuple=tuple, type=type, len=len): -# 'Make a cache key from optionally typed positional and keyword arguments' -# key = args -# if kwds: -# sorted_items = sorted(kwds.items()) -# key += kwd_mark -# for item in sorted_items: -# key += item -# if typed: -# key += tuple(type(v) for v in args) -# if kwds: -# key += tuple(type(v) for k, v in sorted_items) -# elif len(key) == 1 and type(key[0]) in fasttypes: -# return key[0] -# return _HashedSeq(key) + +class CacheVersionKeyBit(KeyBitBase): + def get_data(self, params, view_instance, view_method, request, args, kwargs): + version = view_instance.get_service().cache_version + view_instance.request._request.api_info['cache-version'] = version + return {'cache_version': str(version)} + + +class ListKeyConstructor(KeyConstructor): + cache_version = CacheVersionKeyBit() + # system_filter = SystemFilterKeyBit() + + unique_method_id = bits.UniqueMethodIdKeyBit() + format = bits.FormatKeyBit() + headers = bits.HeadersKeyBit(['Accept']) + # language = bits.LanguageKeyBit() + list_sql_query = bits.ListSqlQueryKeyBit() + querystring = bits.QueryParamsKeyBit() + pagination = bits.PaginationKeyBit() + + def get_key(self, view_instance, view_method, request, args, kwargs): + key = super().get_key(view_instance, view_method, request, args, kwargs) + view_instance.request._request.api_info['cache-key'] = key + return key + + +class APIETAGProcessor(ETAGProcessor): + def is_if_none_match_failed(self, res_etag, etags, if_none_match): + if res_etag and if_none_match: + return quote_etag(res_etag) in etags or '*' in etags + 
else: + return False + + +class APICacheResponse(CacheResponse): + def __init__(self, + timeout=None, + key_func=None, + cache=None, + cache_errors=None): + self.cache_name = cache or extensions_api_settings.DEFAULT_USE_CACHE + super(APICacheResponse, self).__init__(timeout=timeout, key_func=key_func, + cache=cache, cache_errors=cache_errors) + + def process_cache_response(self, + view_instance, + view_method, + request, + args, + kwargs): + key = self.calculate_key( + view_instance=view_instance, + view_method=view_method, + request=request, + args=args, + kwargs=kwargs + ) + cache = caches[self.cache_name] + response = cache.get(key) + if not response: + view_instance.request._request.api_info['cache-hit'] = False + response = view_method(view_instance, request, *args, **kwargs) + response = view_instance.finalize_response(request, response, *args, **kwargs) + response.render() # should be rendered, before picklining while storing to cache + + if not response.status_code >= 400 or self.cache_errors: # pragma: no cover + cache.set(key, response, parse_ttl(view_instance.get_service().cache_ttl or '1y')) + else: + view_instance.request._request.api_info['cache-hit'] = True + + view_instance.store('cache-ttl', view_instance.get_service().cache_ttl) + view_instance.store('service', view_instance.get_service()) + view_instance.store('view', view_instance) + # view_instance.request._request.api_info['cache-ttl'] = view_instance.get_service().cache_ttl + # view_instance.request._request.api_info['service'] = view_instance.get_service() + # view_instance.request._request.api_info['view'] = fqn(view_instance) + + if not hasattr(response, '_closable_objects'): # pragma: no cover + response._closable_objects = [] + + return response + + +etag = APIETAGProcessor +cache_response = APICacheResponse diff --git a/src/unicef_rest_framework/filtering.py b/src/unicef_rest_framework/filtering.py index c30e30d45..81ebdba72 100644 --- a/src/unicef_rest_framework/filtering.py +++ 
b/src/unicef_rest_framework/filtering.py @@ -22,7 +22,7 @@ def filter_queryset(self, request, queryset, view): filter = SystemFilter.objects.match(request, view) if filter: queryset = filter.filter_queryset(queryset) - request._request._system_filter = filter.get_querystring() + view.store('system-filters', filter.get_querystring()) return queryset diff --git a/src/unicef_rest_framework/middleware.py b/src/unicef_rest_framework/middleware.py new file mode 100644 index 000000000..b9d93f651 --- /dev/null +++ b/src/unicef_rest_framework/middleware.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +import logging +import threading + +# from unicef_rest_framework.state import state + +logger = logging.getLogger(__name__) + +_thread_locals = threading.local() + + +class ApiInfo(dict): + def str(self, key): + return str(self.get(key, '')) + + +class ApiMiddleware(object): + def __init__(self, get_response): + self.get_response = get_response + + def __call__(self, request): + request.api_info = ApiInfo() + response = self.get_response(request) + for key, value in request.api_info.items(): + response[key] = request.api_info.str(key) + + # response['filters'] = request.api_info.get('filters', '') + # response['excludes'] = request.api_info.get('excludes', '') + # response['system-filters'] = request.api_info.get('system-filter', '') + # response['cache-key'] = request.api_info.get('cache-key', '') + # response['cache-hit'] = request.api_info.get('cache-hit', '') + # response['cache-ttl'] = request.api_info.get('cache-ttl', '') + # response['cache-version'] = request.api_info.get('cache-version', '') + return response diff --git a/src/unicef_rest_framework/models/service.py b/src/unicef_rest_framework/models/service.py index f7a6dfb95..ea358ebd0 100644 --- a/src/unicef_rest_framework/models/service.py +++ b/src/unicef_rest_framework/models/service.py @@ -3,7 +3,6 @@ import logging from django.contrib.contenttypes.models import ContentType -from django.core.cache import caches 
from django.core.exceptions import ValidationError from django.db import models from django.db.models import F @@ -17,15 +16,12 @@ logger = logging.getLogger(__name__) -cluster_cache = caches[conf.API_CACHE] - class ServiceManager(models.Manager): def invalidate_cache(self, **kwargs): Service.objects.filter(**kwargs).update(cache_version=F("cache_version") + 1) for service in Service.objects.filter(**kwargs): service.viewset.get_service.cache_clear() - cluster_cache.set('{}{}'.format(service.pk, service.name), True) def get_for_viewset(self, viewset): name = getattr(viewset, 'label', viewset.__name__) @@ -42,14 +38,6 @@ def get_for_viewset(self, viewset): return service, isnew def load_services(self): - """ - create a row in the Service table for each known service. - Note: do not update existing entries. - - :param request: - :param code: - :return: - """ router = conf.ROUTER created = deleted = 0 for prefix, viewset, basename in router.registry: @@ -111,7 +99,6 @@ class Service(MasterDataModel): class Meta: ordering = ('name',) - permissions = (("do_not_scramble", "Can read any service unscrambled"),) objects = ServiceManager() @@ -122,10 +109,6 @@ def invalidate_cache(self): def reset_cache(self, value=0): Service.objects.filter(id=self.pk).update(cache_version=value) self.refresh_from_db() - cluster_cache.set('{}{}'.format(self.pk, self.name), True) - - def cache_is_invalid(self): - return cluster_cache.get('{}{}'.format(self.pk, self.name), True) def get_access_level(self): # administrators cannot go lower than coded value @@ -156,10 +139,15 @@ def managed_model(self): def save(self, force_insert=False, force_update=False, using=None, update_fields=None): if self.pk: try: + v = self.viewset() + model = v.get_queryset().model + ct = ContentType.objects.get_for_model(model) + self.linked_models.add(ct) self.viewset._service = None except Exception as e: logger.exception(e) super(Service, self).save(force_insert, force_update, using, update_fields) + # 
self.invalidate_cache() def __str__(self): diff --git a/src/unicef_rest_framework/renderers.py b/src/unicef_rest_framework/renderers.py deleted file mode 100644 index 85b546828..000000000 --- a/src/unicef_rest_framework/renderers.py +++ /dev/null @@ -1,10 +0,0 @@ -# -*- coding: utf-8 -*- -import logging - -from rest_framework.renderers import BrowsableAPIRenderer as _BrowsableAPIRenderer - -logger = logging.getLogger(__name__) - - -class APIBrowsableAPIRenderer(_BrowsableAPIRenderer): - template = 'rest_framework/api.html' diff --git a/src/unicef_rest_framework/renderers/__init__.py b/src/unicef_rest_framework/renderers/__init__.py new file mode 100644 index 000000000..9651defec --- /dev/null +++ b/src/unicef_rest_framework/renderers/__init__.py @@ -0,0 +1,3 @@ +from .api import APIBrowsableAPIRenderer # noqa +from .microsoft.json import MSJSONRenderer # noqa +from .microsoft.xml import MSXmlRenderer # noqa diff --git a/src/unicef_rest_framework/renderers/api.py b/src/unicef_rest_framework/renderers/api.py new file mode 100644 index 000000000..54b60c69c --- /dev/null +++ b/src/unicef_rest_framework/renderers/api.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +import logging + +from rest_framework.renderers import BrowsableAPIRenderer as _BrowsableAPIRenderer +from rest_framework.reverse import reverse + +logger = logging.getLogger(__name__) + + +class APIBrowsableAPIRenderer(_BrowsableAPIRenderer): + template = 'rest_framework/api.html' + + def get_context(self, data, accepted_media_type, renderer_context): + ctx = super(APIBrowsableAPIRenderer, self).get_context(data, accepted_media_type, renderer_context) + # in the real flow, this is added by the MultiTenant Middleware + # but this function is called before the middleware system is involved + request = ctx['request'] + for key, value in request.api_info.items(): + ctx['response_headers'][key] = request.api_info.str(key) + + if request.user.is_staff: + try: + model = ctx['view'].queryset.model + admin_url = 
reverse(f'admin:{model._meta.app_label}_{model._meta.model_name}_changelist') + ctx['admin_url'] = admin_url + except Exception: # pragma: no cover + pass + return ctx diff --git a/src/unicef_rest_framework/renderers/csv.py b/src/unicef_rest_framework/renderers/csv.py new file mode 100644 index 000000000..e8fb81431 --- /dev/null +++ b/src/unicef_rest_framework/renderers/csv.py @@ -0,0 +1,8 @@ +from rest_framework_csv import renderers as r + + +class CSVRenderer(r.CSVRenderer): + + def render(self, data, media_type=None, renderer_context=None, writer_opts=None): + data = dict(data)['results'] + return super().render(data, media_type, renderer_context or {}, writer_opts) diff --git a/src/unicef_security/tasks.py b/src/unicef_rest_framework/renderers/microsoft/__init__.py similarity index 100% rename from src/unicef_security/tasks.py rename to src/unicef_rest_framework/renderers/microsoft/__init__.py diff --git a/src/unicef_rest_framework/renderers/microsoft/json.py b/src/unicef_rest_framework/renderers/microsoft/json.py new file mode 100644 index 000000000..49e575811 --- /dev/null +++ b/src/unicef_rest_framework/renderers/microsoft/json.py @@ -0,0 +1,20 @@ +import json + +from rest_framework.compat import SHORT_SEPARATORS +from rest_framework.renderers import JSONRenderer + + +class MSJSONRenderer(JSONRenderer): + media_type = 'application/json' + format = 'ms-json' + disable_pagination = True + + def render(self, data, accepted_media_type=None, renderer_context=None): + view = renderer_context['view'] + data = {f"{view.__class__.__name__}_JSONResult": json.dumps(data)} + ret = json.dumps( + data, cls=self.encoder_class, + indent=0, ensure_ascii=self.ensure_ascii, + allow_nan=not self.strict, separators=SHORT_SEPARATORS + ) + return bytes(ret.encode('utf-8')) diff --git a/src/unicef_rest_framework/renderers/microsoft/xml.py b/src/unicef_rest_framework/renderers/microsoft/xml.py new file mode 100644 index 000000000..9e4e409ef --- /dev/null +++ 
b/src/unicef_rest_framework/renderers/microsoft/xml.py @@ -0,0 +1,34 @@ +from django.utils.encoding import smart_text +from rest_framework_xml.renderers import XMLRenderer + + +class MSXmlRenderer(XMLRenderer): + media_type = 'application/xml' + format = 'ms-xml' + disable_pagination = True + root_tag_name = "DocumentElement" + + def render(self, data, accepted_media_type=None, renderer_context=None): + view = renderer_context['view'] + self.item_tag_name = view.get_queryset().model._meta.model_name + return super().render(data, accepted_media_type, renderer_context) + + def _to_xml(self, xml, data): + if isinstance(data, (list, tuple)): + for item in data: + xml.startElement(self.item_tag_name, {}) + self._to_xml(xml, item) + xml.endElement(self.item_tag_name) + + elif isinstance(data, dict): + for key, value in data.items(): + xml.startElement(key.upper(), {}) + self._to_xml(xml, value) + xml.endElement(key.upper()) + + elif data is None: + # Don't output any value + pass + + else: + xml.characters(smart_text(data)) diff --git a/src/unicef_rest_framework/state.py b/src/unicef_rest_framework/state.py new file mode 100644 index 000000000..10bd0c894 --- /dev/null +++ b/src/unicef_rest_framework/state.py @@ -0,0 +1,21 @@ +# # -*- coding: utf-8 -*- +# +# from threading import local +# +# +# class State(local): +# request = None +# data = {} +# +# def clear(self): +# self.data = {} +# self.request = None +# +# def set(self, key, value): +# self.data[key] = value +# +# def get(self, key, default=None): +# return self.data.get(key, default) +# +# +# state: State = State() diff --git a/src/unicef_rest_framework/templates/rest_framework/base.html b/src/unicef_rest_framework/templates/rest_framework/base.html index 368265510..4751d9b04 100644 --- a/src/unicef_rest_framework/templates/rest_framework/base.html +++ b/src/unicef_rest_framework/templates/rest_framework/base.html @@ -107,7 +107,6 @@ {% endif %} - {% if delete_form %} @@ -147,6 +146,12 @@

    {{ name }}

    {% endblock %} + {% if admin_url %} + Admin + {% endif %} + {% if paginator %}