diff --git a/.DS_Store b/.DS_Store new file mode 100644 index 0000000..25af25f Binary files /dev/null and b/.DS_Store differ diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..9aea02a --- /dev/null +++ b/.gitignore @@ -0,0 +1,47 @@ +*.py[cod] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +build +eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.tox +nosetests.xml + +# Translations +*.mo + +# Mr Developer +.mr.developer.cfg +.project +.pydevproject + +# Complexity +output/*.html +output/*/index.html + +# Sphinx +docs/_build + +.webassets-cache + +# Virtualenvs +env +.python-version diff --git a/.idea/.name b/.idea/.name new file mode 100644 index 0000000..37b3833 --- /dev/null +++ b/.idea/.name @@ -0,0 +1 @@ +stat-tracker \ No newline at end of file diff --git a/.idea/encodings.xml b/.idea/encodings.xml new file mode 100644 index 0000000..d821048 --- /dev/null +++ b/.idea/encodings.xml @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml new file mode 100644 index 0000000..11b61f7 --- /dev/null +++ b/.idea/misc.xml @@ -0,0 +1,27 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml new file mode 100644 index 0000000..d502ed4 --- /dev/null +++ b/.idea/modules.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/.idea/scopes/scope_settings.xml b/.idea/scopes/scope_settings.xml new file mode 100644 index 0000000..922003b --- /dev/null +++ b/.idea/scopes/scope_settings.xml @@ -0,0 +1,5 @@ + + + + \ No newline at end of file diff --git a/.idea/stat-tracker.iml b/.idea/stat-tracker.iml new file mode 100644 index 0000000..66f9860 --- /dev/null +++ b/.idea/stat-tracker.iml @@ -0,0 +1,16 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml new file mode 100644 index 0000000..94a25f7 --- /dev/null +++ b/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/.idea/workspace.xml b/.idea/workspace.xml new file mode 100644 index 0000000..e63f7ed --- /dev/null +++ b/.idea/workspace.xml @@ -0,0 +1,252 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1425495258786 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/.ipynb_checkpoints/Untitled0-checkpoint.ipynb b/.ipynb_checkpoints/Untitled0-checkpoint.ipynb new file mode 100644 index 0000000..98001ee --- /dev/null +++ b/.ipynb_checkpoints/Untitled0-checkpoint.ipynb @@ -0,0 +1,9 @@ +{ + "metadata": { + "name": "", + "signature": "sha256:475af10b88b1c0c96bf2bd87512e7381f88959936e3ebf14a69e8118e93b6e92" + }, + "nbformat": 3, + "nbformat_minor": 0, + "worksheets": [] +} \ No newline at end of file diff --git a/Procfile b/Procfile new file mode 100644 index 0000000..b3c7363 --- /dev/null +++ b/Procfile @@ -0,0 +1 @@ +web: gunicorn manager:app --log-file=- diff --git a/README.md b/README.md index 5e7bab7..eb659d4 100644 --- a/README.md +++ b/README.md @@ -1,113 +1,3 @@ -# Stat Tracker +# stat-tracker -## Description - -Build an application people can use to track 
any stats they want about themselves. - -## Objectives - -### Learning Objectives - -After completing this assignment, you should understand: - -* ... - -### Performance Objectives - -After completing this assignment, you should be able to: - -* ... - -## Details - -### Deliverables - -* A Git repo called stat-tracker containing at least: - * `README.md` file explaining how to run your project - * a `requirements.txt` file - * a way to seed your application with data -* An instance of your app running on Heroku - -### Requirements - -* No PEP8 or Pyflakes warnings or errors -* Meets API specifications - -## Normal Mode - -You are going to build an application to track personal statistics. A personal statistic is a numerical record for a person in a time series by day. For example, let's say I wanted to track how many flights of stairs I walked up in a day. My last week might look like: - -Date | Flights ----------- | ------- -02/19/2015 | 8 -02/20/2015 | 6 -02/21/2015 | 7 -02/22/2015 | 6 -02/23/2015 | 8 -02/24/2015 | 4 -02/25/2015 | 6 - -Users of your application can create as many different things to track as they want. They should have an easy-to-use interface to track their stats, allowing them to enter the number for the current day or any previous day. - -You should allow for: - -* User registration -* User login -* Creating a new stat to track -* Recording a stat for a day -* Editing a stat for a day -* Showing a chart for a stat for any series of dates, defined by a start and stop date. The default should be the last 30 days. - -For the chart, you can use whatever you like. Matplotlib is our old friend, but can be unwieldy. [Bokeh](http://bokeh.pydata.org/en/latest/) and [Plotly](https://plot.ly/python/) are other good choices to use with HTML. - -You should also have an API. One of the ways people expect to use this application is via their phone, so you'll need a REST API. - -### API Specification - -For your API, I'm specifying the endpoints you'll need and what they should do. The URLs I'm using are not prefixed: yours should be. -All the endpoints require authentication using HTTP Basic Auth. - -Verb | URL | Action ------- | ---- | ------- -GET | /stats | Show a list of all stats I am tracking, and links to their individual pages -POST | /stats | Create a new stat for me to track. -GET | /stats/{id} | Show information about one stat I am tracking, and give me the data I have recorded for that stat. -PUT | /stats/{id} | Update one stat I am tracking, changing attributes such as name or type. Does not allow for changing tracked data. -DELETE | /stats/{id} | Delete a stat I am tracking. This should remove tracked data for that stat as well. -POST or PUT | /stats/{id}/data | Add tracked data for a day. The JSON sent with this should include the day tracked. You can also override the data for a day already recorded. -DELETE | /stats/{id}/data | Remove tracked data for a day. You should send JSON that includes the date to be removed. - -I am not specifying what the JSON these return should look like, but you should feel free to follow one of the many competing standards. [JSON API](http://jsonapi.org/) is very comprehensive. - - -## Hard Mode - -In addition to the requirements from **Normal Mode**: - -* Users should be able to record different types of stats. You can choose the types, but here are some suggestions: - * Clicker-style stats. The UI on these should change so you have a way to increase them by one via a button click. Good for tracking things as you're doing them. 
- * Time-goal stats. The stat has a beginning value, ending value, and ending date. Track as normal, but you should be able to see if you're on track to meet your goal. Examples: weight loss, building up for a long run. - * Yes-no stats. Did I do this today? This is often called the "Seinfeld calendar" or [chain calendar](http://chaincalendar.com/about). - * Stats on a scale instead of unbounded. Example: On a scale of 1 to 5, what's my happiness level today? - -* Make sure your interface [is responsive](https://developers.google.com/web/fundamentals/layouts/rwd-fundamentals/) and works well via mobile. - - -## Nightmare Mode - -* Give users a way to invite other users to collaborate/compete on a stat with them. Users can only add/edit their own data, but the stat charts will show everyone competing. - - -## Additional Resources - -* [JSON API](http://jsonapi.org/) -* [Bokeh](http://bokeh.pydata.org/en/latest/) -* [Plotly](https://plot.ly/python/) -* [Flask-RESTful](https://flask-restful.readthedocs.org/en/0.3.1/). A Flask plugin that could help or make this much worse. -* [RESTless](http://restless.readthedocs.org/en/latest/). Another Python library that could help or harm. -* [Kube](http://imperavi.com/kube/). A simpler CSS framework I've been using. -* [Peewee](https://peewee.readthedocs.org/en/latest/index.html). A less-featureful, but perhaps easier to use ORM. - -## Credit - -... +A scorecard for your day. diff --git a/Untitled0.ipynb b/Untitled0.ipynb new file mode 100644 index 0000000..c055fd0 --- /dev/null +++ b/Untitled0.ipynb @@ -0,0 +1,316 @@ +{ + "metadata": { + "name": "", + "signature": "sha256:7b13ea8a4361fc67b875d8c845b8aab4047c7a75d5728130e0cd385589ad9840" + }, + "nbformat": 3, + "nbformat_minor": 0, + "worksheets": [ + { + "cells": [ + { + "cell_type": "code", + "collapsed": false, + "input": [ + "from faker import Faker\n", + "import random\n", + "import datetime\n" + ], + "language": "python", + "metadata": {}, + "outputs": [], + "prompt_number": 1 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "fake = Faker()\n" + ], + "language": "python", + "metadata": {}, + "outputs": [], + "prompt_number": 2 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "for _ in range(200):\n", + " print(fake.date_time_between(start_date='-y', end_date='now'))" + ], + "language": "python", + "metadata": {}, + "outputs": [ + { + "output_type": "stream", + "stream": "stdout", + "text": [ + "2003-04-27 23:43:34\n", + "2003-04-21 09:50:57\n", + "2013-10-31 16:22:42\n", + "2009-08-23 08:10:30\n", + "2003-09-07 23:46:26\n", + "2007-08-14 20:23:01\n", + "2004-11-24 23:36:20\n", + "2006-11-04 19:03:00\n", + "2009-12-16 07:59:05\n", + "2013-07-04 08:14:11\n", + "2012-07-07 17:03:18\n", + "2014-08-08 07:39:34\n", + "2013-12-12 01:56:17\n", + "2012-02-29 17:18:40\n", + "2006-10-27 03:59:36\n", + "2006-08-29 14:17:46\n", + "2004-10-11 20:53:08\n", + "2008-06-02 03:46:54\n", + "2004-04-26 14:10:33\n", + "2009-07-04 06:44:04\n", + "2008-06-04 11:03:30\n", + "2007-08-25 08:59:10\n", + "2012-07-29 14:59:22\n", + "2009-05-29 03:04:31\n", + "2003-08-09 05:46:44\n", + "2009-03-17 20:15:42\n", + "2004-02-25 01:43:11\n", + "2010-05-02 07:56:55\n", + "2013-09-06 20:41:51\n", + "2012-06-30 06:42:34\n", + "2008-08-14 04:14:40\n", + "2010-09-13 04:05:03\n", + "2005-10-04 00:51:31\n", + "2011-03-18 05:49:47\n", + "2008-12-27 23:40:49\n", + "2015-01-11 10:27:27\n", + "2010-05-13 13:21:47\n", + "2008-09-30 13:50:10\n", + "2008-01-20 10:22:52\n", + "2008-06-03 23:15:55\n", + 
"2014-01-08 13:29:07\n", + "2012-09-18 10:33:11\n", + "2007-09-03 18:20:19\n", + "2011-12-23 18:07:50\n", + "2010-06-15 03:24:10\n", + "2012-12-04 19:41:26\n", + "2010-08-05 09:22:12\n", + "2003-12-29 02:17:15\n", + "2006-10-30 13:44:00\n", + "2012-11-14 20:48:52\n", + "2011-10-31 07:24:52\n", + "2013-01-24 00:29:19\n", + "2012-12-28 21:00:05\n", + "2011-05-24 11:56:44\n", + "2006-02-26 18:53:18\n", + "2007-09-14 10:18:24\n", + "2009-04-19 16:04:24\n", + "2011-08-30 03:36:29\n", + "2008-07-11 18:49:54\n", + "2006-05-17 06:05:21\n", + "2003-04-23 08:16:13\n", + "2011-08-26 02:40:22\n", + "2005-07-09 04:57:54\n", + "2011-12-25 03:55:32\n", + "2007-04-17 07:28:25\n", + "2006-05-02 05:22:28\n", + "2006-10-28 02:55:34\n", + "2006-09-10 23:59:20\n", + "2008-07-21 15:56:46\n", + "2005-07-04 17:24:41\n", + "2009-04-26 17:57:57\n", + "2006-04-19 03:30:11\n", + "2004-09-05 17:37:02\n", + "2007-01-14 05:06:45\n", + "2008-05-20 01:55:36\n", + "2011-11-26 21:15:00\n", + "2005-12-04 17:08:23\n", + "2012-04-05 18:21:12\n", + "2008-04-14 16:48:04\n", + "2007-06-24 21:22:43\n", + "2006-06-25 19:34:07\n", + "2011-10-31 22:49:35\n", + "2005-11-04 02:50:17\n", + "2011-12-04 19:57:51\n", + "2013-01-26 14:01:55\n", + "2007-09-28 06:31:59\n", + "2008-08-01 03:07:36\n", + "2012-01-22 15:44:16\n", + "2013-02-21 16:13:47\n", + "2013-06-25 00:09:22\n", + "2010-01-17 00:44:57\n", + "2013-10-19 04:41:26\n", + "2013-08-04 04:43:07\n", + "2007-09-10 10:43:55\n", + "2005-06-01 22:43:50\n", + "2009-01-02 13:47:08\n", + "2005-12-18 21:52:25\n", + "2005-06-26 15:26:30\n", + "2013-08-23 22:07:50\n", + "2013-04-02 18:51:34\n", + "2012-09-24 02:50:57\n", + "2004-02-24 07:12:11\n", + "2013-08-01 03:40:51\n", + "2013-04-06 20:08:24\n", + "2010-03-06 17:48:39\n", + "2004-03-11 05:16:19\n", + "2014-03-28 05:48:52\n", + "2006-10-28 20:00:11\n", + "2008-03-18 19:10:08\n", + "2008-02-17 11:11:23\n", + "2013-11-23 07:39:23\n", + "2014-11-22 16:11:46\n", + "2013-04-08 07:12:23\n", + "2008-12-27 17:22:52\n", + "2010-01-13 22:26:38\n", + "2003-10-16 16:45:37\n", + "2010-07-09 10:02:18\n", + "2010-10-03 19:38:51\n", + "2010-04-25 06:28:28\n", + "2007-04-20 09:55:52\n", + "2008-10-02 08:59:28\n", + "2004-06-16 10:11:09\n", + "2006-03-15 04:11:10\n", + "2007-07-02 16:36:21\n", + "2006-01-15 18:24:46\n", + "2011-07-04 13:32:18\n", + "2007-08-31 19:04:47\n", + "2009-10-09 10:20:14\n", + "2012-05-03 03:38:48\n", + "2010-12-06 13:57:21\n", + "2014-04-29 16:51:43\n", + "2003-05-29 05:04:38\n", + "2004-04-01 21:59:41\n", + "2011-07-15 14:37:11\n", + "2005-06-12 12:54:10\n", + "2010-09-12 05:25:50\n", + "2006-09-25 07:16:51\n", + "2013-09-04 01:45:25\n", + "2014-11-02 10:29:48\n", + "2008-08-14 11:36:12\n", + "2009-10-19 12:44:30\n", + "2004-02-09 17:48:35\n", + "2011-01-12 02:54:20\n", + "2011-06-25 19:42:33\n", + "2009-02-11 13:20:14\n", + "2009-03-24 16:31:16\n", + "2006-05-20 12:50:48\n", + "2010-11-29 22:12:01\n", + "2004-02-29 05:35:01\n", + "2014-10-04 14:58:10\n", + "2010-06-15 08:33:16\n", + "2005-10-29 00:41:13\n", + "2009-02-01 19:27:23\n", + "2012-10-07 00:35:00\n", + "2008-08-18 02:04:29\n", + "2013-03-16 11:09:44\n", + "2012-12-16 11:10:10\n", + "2010-07-28 05:20:50\n", + "2007-09-01 18:44:07\n", + "2012-12-22 03:39:55\n", + "2004-07-14 21:36:55\n", + "2007-09-27 08:23:56\n", + "2008-11-26 08:37:08\n", + "2009-05-31 02:37:43\n", + "2011-03-01 07:53:14\n", + "2012-08-10 01:54:34\n", + "2011-11-30 06:53:37\n", + "2014-03-13 11:37:24\n", + "2012-01-17 18:55:59\n", + "2005-06-16 07:10:05\n", + "2011-01-10 13:36:50\n", + "2003-06-21 
18:53:26\n", + "2012-10-11 11:02:28\n", + "2010-12-04 01:35:27\n", + "2012-12-25 12:24:01\n", + "2012-01-15 22:56:30\n", + "2009-12-02 11:11:41\n", + "2009-03-27 18:49:06\n", + "2008-12-03 15:26:51\n", + "2014-09-14 10:39:16\n", + "2011-09-23 15:19:00\n", + "2014-01-10 00:26:10\n", + "2011-12-14 07:26:10\n", + "2004-05-18 02:59:38\n", + "2005-06-07 10:53:09\n", + "2011-08-14 12:45:23\n", + "2004-08-01 13:33:57\n", + "2012-03-31 18:35:16\n", + "2011-11-10 11:48:08\n", + "2007-03-02 00:30:15\n", + "2014-07-23 12:48:44\n", + "2011-08-07 06:59:12\n", + "2010-03-31 21:44:03\n", + "2007-01-08 09:00:13\n", + "2007-10-29 13:14:28\n", + "2008-10-01 08:44:06\n", + "2012-07-07 09:41:36\n", + "2010-10-18 01:26:19\n", + "2011-01-05 21:37:24\n", + "2005-05-23 17:22:40\n" + ] + } + ], + "prompt_number": 11 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "import plotly.plotly as py" + ], + "language": "python", + "metadata": {}, + "outputs": [], + "prompt_number": 1 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "from plotly.graph_objs import *" + ], + "language": "python", + "metadata": {}, + "outputs": [], + "prompt_number": 2 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [ + "import plotly.plotly as py\n", + "from plotly.graph_objs import *\n", + "import plotly.tools as tls\n", + "tls.set_credentials_file(username='brundleflytelepod', api_key='uhht9a4igs')\n", + "\n", + "\n", + "\n", + "trace0 = Scatter(\n", + " x=[1, 2, 3, 4],\n", + " y=[10, 15, 13, 17]\n", + ")\n", + "trace1 = Scatter(\n", + " x=[1, 2, 3, 4],\n", + " y=[16, 5, 11, 9]\n", + ")\n", + "data = Data([trace0, trace1])\n", + "\n", + "unique_url = py.plot(data, filename = 'basic-line')" + ], + "language": "python", + "metadata": {}, + "outputs": [], + "prompt_number": 6 + }, + { + "cell_type": "code", + "collapsed": false, + "input": [], + "language": "python", + "metadata": {}, + "outputs": [] + } + ], + "metadata": {} + } + ] +} \ No newline at end of file diff --git a/manage.py b/manage.py new file mode 100644 index 0000000..ecc06ea --- /dev/null +++ b/manage.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python +import os + +from flask.ext.script import Manager, Shell, Server +from flask.ext.migrate import MigrateCommand +from flask.ext.script.commands import ShowUrls, Clean + +from stat_tracker import create_app, db, models + +HERE = os.path.abspath(os.path.dirname(__file__)) +TEST_PATH = os.path.join(HERE, 'tests') + +app = create_app() +manager = Manager(app) +manager.add_command('server', Server()) +manager.add_command('db', MigrateCommand) +manager.add_command('show-urls', ShowUrls()) +manager.add_command('clean', Clean()) + + +@manager.shell +def make_shell_context(): + """ Creates a python REPL with several default imports + in the context of the app + """ + + return dict(app=app, db=db) + + +@manager.command +def test(): + """Run the tests.""" + import pytest + exit_code = pytest.main([TEST_PATH, '--verbose']) + return exit_code + + +if __name__ == '__main__': + manager.run() diff --git a/migrations/README b/migrations/README new file mode 100755 index 0000000..98e4f9c --- /dev/null +++ b/migrations/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/migrations/alembic.ini b/migrations/alembic.ini new file mode 100644 index 0000000..f8ed480 --- /dev/null +++ b/migrations/alembic.ini @@ -0,0 +1,45 @@ +# A generic, single database configuration. 
+ +[alembic] +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/migrations/env.py b/migrations/env.py new file mode 100644 index 0000000..70961ce --- /dev/null +++ b/migrations/env.py @@ -0,0 +1,73 @@ +from __future__ import with_statement +from alembic import context +from sqlalchemy import engine_from_config, pool +from logging.config import fileConfig + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +from flask import current_app +config.set_main_option('sqlalchemy.url', current_app.config.get('SQLALCHEMY_DATABASE_URI')) +target_metadata = current_app.extensions['migrate'].db.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure(url=url) + + with context.begin_transaction(): + context.run_migrations() + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + engine = engine_from_config( + config.get_section(config.config_ini_section), + prefix='sqlalchemy.', + poolclass=pool.NullPool) + + connection = engine.connect() + context.configure( + connection=connection, + target_metadata=target_metadata + ) + + try: + with context.begin_transaction(): + context.run_migrations() + finally: + connection.close() + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() + diff --git a/migrations/script.py.mako b/migrations/script.py.mako new file mode 100755 index 0000000..9570201 --- /dev/null +++ b/migrations/script.py.mako @@ -0,0 +1,22 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision} +Create Date: ${create_date} + +""" + +# revision identifiers, used by Alembic. 
+revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/migrations/versions/160f2bee6fa_.py b/migrations/versions/160f2bee6fa_.py new file mode 100644 index 0000000..77ecde5 --- /dev/null +++ b/migrations/versions/160f2bee6fa_.py @@ -0,0 +1,26 @@ +"""empty message + +Revision ID: 160f2bee6fa +Revises: 2cbf9c6821b +Create Date: 2015-03-01 23:32:06.382769 + +""" + +# revision identifiers, used by Alembic. +revision = '160f2bee6fa' +down_revision = '2cbf9c6821b' + +from alembic import op +import sqlalchemy as sa + + +def upgrade(): + ### commands auto generated by Alembic - please adjust! ### + pass + ### end Alembic commands ### + + +def downgrade(): + ### commands auto generated by Alembic - please adjust! ### + pass + ### end Alembic commands ### diff --git a/migrations/versions/2384594e6b5_.py b/migrations/versions/2384594e6b5_.py new file mode 100644 index 0000000..c35c47d --- /dev/null +++ b/migrations/versions/2384594e6b5_.py @@ -0,0 +1,26 @@ +"""empty message + +Revision ID: 2384594e6b5 +Revises: 160f2bee6fa +Create Date: 2015-03-01 23:33:17.240676 + +""" + +# revision identifiers, used by Alembic. +revision = '2384594e6b5' +down_revision = '160f2bee6fa' + +from alembic import op +import sqlalchemy as sa + + +def upgrade(): + ### commands auto generated by Alembic - please adjust! ### + pass + ### end Alembic commands ### + + +def downgrade(): + ### commands auto generated by Alembic - please adjust! ### + pass + ### end Alembic commands ### diff --git a/migrations/versions/277dbc12983_.py b/migrations/versions/277dbc12983_.py new file mode 100644 index 0000000..e62a346 --- /dev/null +++ b/migrations/versions/277dbc12983_.py @@ -0,0 +1,53 @@ +"""empty message + +Revision ID: 277dbc12983 +Revises: None +Create Date: 2015-03-01 12:38:35.405989 + +""" + +# revision identifiers, used by Alembic. +revision = '277dbc12983' +down_revision = None + +from alembic import op +import sqlalchemy as sa + + +def upgrade(): + ### commands auto generated by Alembic - please adjust! ### + op.create_table('user', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=100), nullable=False), + sa.Column('email', sa.String(length=100), nullable=False), + sa.Column('encrypted_password', sa.String(length=60), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('email') + ) + op.create_table('activities', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=True), + sa.Column('name', sa.String(length=255), nullable=True), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('name') + ) + op.create_table('stat', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=True), + sa.Column('activity_id', sa.Integer(), nullable=True), + sa.Column('ammount', sa.Integer(), nullable=True), + sa.Column('time', sa.Date(), nullable=True), + sa.ForeignKeyConstraint(['activity_id'], ['activities.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + ### end Alembic commands ### + + +def downgrade(): + ### commands auto generated by Alembic - please adjust! 
### + op.drop_table('stat') + op.drop_table('activities') + op.drop_table('user') + ### end Alembic commands ### diff --git a/migrations/versions/2cbf9c6821b_.py b/migrations/versions/2cbf9c6821b_.py new file mode 100644 index 0000000..4534dc4 --- /dev/null +++ b/migrations/versions/2cbf9c6821b_.py @@ -0,0 +1,26 @@ +"""empty message + +Revision ID: 2cbf9c6821b +Revises: 277dbc12983 +Create Date: 2015-03-01 23:24:48.499476 + +""" + +# revision identifiers, used by Alembic. +revision = '2cbf9c6821b' +down_revision = '277dbc12983' + +from alembic import op +import sqlalchemy as sa + + +def upgrade(): + ### commands auto generated by Alembic - please adjust! ### + pass + ### end Alembic commands ### + + +def downgrade(): + ### commands auto generated by Alembic - please adjust! ### + pass + ### end Alembic commands ### diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..4628e8c --- /dev/null +++ b/requirements.txt @@ -0,0 +1,27 @@ + +Flask==0.10.1 +MarkupSafe==0.23 +Werkzeug==0.10.1 +Jinja2==2.7.3 +itsdangerous==0.24 + +Flask-SQLAlchemy==2.0 +SQLAlchemy==0.9.8 + +Flask-Migrate==1.3.0 + +Flask-WTF==0.11 +WTForms==2.0.2 + +Flask-Login==0.2.11 +Flask-Bcrypt==0.6.2 + +## Config +flask-appconfig==0.9.1 + +## Heroku +gunicorn== 19.2.1 +psycopg2==2.6 + + +fake-factory diff --git a/runtime.txt b/runtime.txt new file mode 100644 index 0000000..bc42bd0 --- /dev/null +++ b/runtime.txt @@ -0,0 +1 @@ +python-3.4.2 diff --git a/stat_tracker/.DS_Store b/stat_tracker/.DS_Store new file mode 100644 index 0000000..818847e Binary files /dev/null and b/stat_tracker/.DS_Store differ diff --git a/stat_tracker/__init__.py b/stat_tracker/__init__.py new file mode 100644 index 0000000..d234ee7 --- /dev/null +++ b/stat_tracker/__init__.py @@ -0,0 +1,29 @@ +from flask import Flask +from flask.ext.wtf import CsrfProtect + +from .extensions import db, migrate, bcrypt, login_manager, config + +from . 
import models +from .views.users import users +from .views.activities import activities +from .views.api import api + +SQLALCHEMY_DATABASE_URI = "postgres://localhost/stat_tracker" +DEBUG = True +SECRET_KEY = 'development key' + + +def create_app(): + app = Flask("Self dot Quantify") + app.config.from_object(__name__) + app.register_blueprint(users) + app.register_blueprint(activities) + app.register_blueprint(api, url_prefix='/api/v1') + + config.init_app(app) + db.init_app(app) + migrate.init_app(app, db) + login_manager.init_app(app) + bcrypt.init_app(app) + + return app diff --git a/stat_tracker/extensions.py b/stat_tracker/extensions.py new file mode 100644 index 0000000..1fcda53 --- /dev/null +++ b/stat_tracker/extensions.py @@ -0,0 +1,14 @@ +from flask.ext.sqlalchemy import SQLAlchemy +db= SQLAlchemy() + +from flask.ext.migrate import Migrate +migrate = Migrate() + +from flask.ext.bcrypt import Bcrypt +bcrypt = Bcrypt() + +from flask.ext.login import LoginManager +login_manager = LoginManager() + +from flask.ext.appconfig import HerokuConfig +config = HerokuConfig() diff --git a/stat_tracker/forms.py b/stat_tracker/forms.py new file mode 100644 index 0000000..c865379 --- /dev/null +++ b/stat_tracker/forms.py @@ -0,0 +1,42 @@ +from flask_wtf import Form +from wtforms import StringField, PasswordField, IntegerField, DateField +from wtforms.validators import DataRequired, Email, EqualTo, URL, Optional +from wtforms.fields.html5 import EmailField + +class LoginForm(Form): + email = EmailField('Email', validators=[DataRequired(), Email()]) + password = PasswordField('Password', validators=[DataRequired()]) + + +class RegistrationForm(Form): + name = StringField('Name', validators=[DataRequired()]) + email = EmailField('Email', validators=[DataRequired(), Email()]) + password = PasswordField('Password', validators=[DataRequired(), + EqualTo('verify_password', + message='Passwords did not match')]) + verify_password = PasswordField('Verify Password') + + +class AddNewAction(Form): + name = StringField('Action Name', validators=[DataRequired()]) + +class EditAction(Form): + name = StringField('Action Name', validators=[DataRequired()]) + +class AddNewStat(Form): + ammount = IntegerField('Quantity', validators=[DataRequired()]) + date = DateField('Date YYYY-MM-DD', validators=[DataRequired()]) + +class ApiNewStat(Form): + date = DateField('Date YYYY-MM-DD', validators=[Optional()]) + ammount = IntegerField('Quantity', validators=[Optional()]) + +class EditStat(Form): + ammount = IntegerField('Ammount', validators=[DataRequired()]) + +class DateRange(Form): + start = DateField('Start Date', validators=[DataRequired()]) + stop = DateField('Stop Date', validators=[DataRequired()]) + +class DateSearch(Form): + date = DateField('Date', validators=[DataRequired()]) diff --git a/stat_tracker/models.py b/stat_tracker/models.py new file mode 100644 index 0000000..8a3d45c --- /dev/null +++ b/stat_tracker/models.py @@ -0,0 +1,130 @@ +from . 
import db, bcrypt, login_manager +from flask.ext.login import UserMixin +from sqlalchemy import func, and_ +from datetime import date, timedelta, datetime +from stat_tracker import db +from flask import request, url_for + +@login_manager.user_loader +def load_user(id): + return User.query.get(id) + + +class User(db.Model, UserMixin): + id = db.Column(db.Integer, primary_key=True, autoincrement=True) + name = db.Column(db.String(100), nullable=False) + email = db.Column(db.String(100), unique=True, nullable=False) + encrypted_password = db.Column(db.String(60)) + + def get_password(self): + return getattr(self, "_password", None) + + def set_password(self, password): + self._password = password + self.encrypted_password = bcrypt.generate_password_hash(password) + + password = property(get_password, set_password) + + def check_password(self, password): + return bcrypt.check_password_hash(self.encrypted_password, password) + + @property + def user_action(self): + return [activity.name for activity in self.activities] + + def __repr__(self): + return "".format(self.email) + + +class Activities(db.Model): + id = db.Column(db.Integer, primary_key=True, autoincrement=True) + user_id = db.Column(db.Integer, db.ForeignKey('user.id')) + name = db.Column(db.String(255), unique=True) + user = db.relationship('User', backref=db.backref('activities', lazy='dynamic')) + stat = db.relationship('Stat', backref=db.backref('activities')) + + + @property + def times_total(self): + count = 0 + stats = Stat.query.filter_by(activity_id=self.id).all() + for stat in stats: + if stat.ammount == None: + stat.ammount = 0 + count += stat.ammount + return count + + + @property + def times_last_7(self): + times = (self.times_range(7)) + count = 0 + for time in times: + if time[1] == None: + time[1] = 0 + count += time[1] + return count + + @property + def times_last_30(self): + times = (self.times_range()) + count = 0 + for time in times: + if time[1] == None: + time[1] = 0 + count += time[1] + return count + + @property + def times_last_365(self): + times = (self.times_range(365)) + count = 0 + for time in times: + if time[1] == None: + time[1] = 0 + count += time[1] + return count + + + def times_range(self, days=30): + days = timedelta(days=days) + date_from = date.today() - days + + stat_date = func.cast(Stat.time, db.Date) + return db.session.query(stat_date, func.sum(Stat.ammount)). \ + group_by(stat_date). \ + filter(and_(Stat.activity_id == self.id, + stat_date >= str(date_from))). \ + order_by(stat_date).all() + + + def custom_time(self, stop, start): + stat_date = func.cast(Stat.time, db.Date) + return db.session.query(stat_date, func.sum(Stat.ammount)). \ + group_by(stat_date). \ + filter(and_(Stat.activity_id == self.id, + stat_date >= str(start), stat_date <= str(stop))). 
\ + order_by(stat_date).all() + + + + def to_dict(self): + return {'id': self.id, + 'name': self.name, + 'url': str(request.url_root)[:-1:]+str(url_for('api.activity', id=self.id))} + + def __repr__(self): + return "Activity: {}".format(self.name) + + +class Stat(db.Model): + id = db.Column(db.Integer, primary_key=True, autoincrement=True) + user_id = db.Column(db.Integer, db.ForeignKey('user.id')) + activity_id = db.Column(db.Integer, db.ForeignKey('activities.id', ondelete='CASCADE')) + ammount = db.Column(db.Integer) + time = db.Column(db.Date) + + def stat_to_dict(self): + return {'id': self.id, + 'ammount': self.ammount, + 'time': str(self.time)} diff --git a/stat_tracker/views/activities.py b/stat_tracker/views/activities.py new file mode 100644 index 0000000..32420a8 --- /dev/null +++ b/stat_tracker/views/activities.py @@ -0,0 +1,200 @@ +import random +from flask import Blueprint, render_template, redirect, flash, url_for, request, send_file +from flask.ext.login import current_user +from sqlalchemy import desc +from ..forms import AddNewAction, EditAction, AddNewStat, DateRange, EditStat +from ..models import Activities, Stat +from ..extensions import db +from datetime import datetime +from io import BytesIO +import matplotlib.pyplot as plt +import time + +activities = Blueprint("activities", __name__) + + +def flash_errors(form, category="warning"): + '''Flash all errors for a form.''' + for field, errors in form.errors.items(): + for error in errors: + flash("{0} - {1}".format(getattr(form, field).label.text, error), + category) + + + +@activities.route('/add', methods=['GET', 'POST']) +def add_activity(): + user = current_user + form = AddNewAction() + if form.validate_on_submit(): + activity = Activities.query.filter_by(name=form.name.data).first() + if activity in user.activities: + flash('You are already monitoring that activity.') + flash_errors(form) + return render_template('add_activity.html', form=form) + else: + activity = Activities(user_id = current_user.id, + name = form.name.data) + db.session.add(activity) + db.session.commit() + flash('New Activity Added') + return redirect(url_for('users.home_view')) + else: + flash_errors(form) + return render_template('add_activity.html', form=form) + + +@activities.route('/edit/', methods=['GET', 'POST']) +def edit_activity(id): + current = Activities.query.get(id) + form = EditAction(obj=current) + if form.validate_on_submit(): + form.populate_obj(current) + db.session.commit() + flash('Activity Updated') + return redirect(url_for('users.home_view')) + else: + flash_errors(form) + return render_template('edit_activity.html', form=form) + + +@activities.route('/delete/', methods=['GET']) +def delete_activity(id): + current = Activities.query.get(id) + current_stats = Stat.query.filter_by(activity_id = id) + for stats in current_stats: + db.session.delete(stats) + db.session.delete(current) + db.session.commit() + flash('Activity Deleted!!') + return redirect(url_for('users.home_view')) + + +@activities.route('//add', methods=['GET', 'POST']) +def add_stat(id): + current = Activities.query.get_or_404(id) + name = current.name + form = AddNewStat() + if form.validate_on_submit(): + date = Stat.query.filter_by(activity_id=current.id).filter_by(time=form.date.data).first() + if date: + flash('You have already entered an ammount for that date!' 
+                  'Edit the existing entry!')
+        else:
+            s = Stat(user_id = current_user.id,
+                     activity_id = current.id,
+                     ammount = form.ammount.data,
+                     time = form.date.data)
+            db.session.add(s)
+            db.session.commit()
+            flash('Stats Updated!')
+            return redirect(url_for('users.home_view'))
+    else:
+        flash_errors(form)
+    return render_template('addstat.html', form=form, name=name)
+
+
+@activities.route('/activity/<int:id>', methods=['GET', 'POST'])
+def stat_table(id):
+    a = Activities.query.get_or_404(id)
+    stat = Stat.query.filter_by(activity_id=a.id).order_by(Stat.time.desc())
+    name = a.name
+    form = AddNewStat()
+    if form.validate_on_submit():
+        date = Stat.query.filter_by(activity_id=a.id).filter_by(time=form.date.data).first()
+        if date:
+            date.ammount = form.ammount.data
+            db.session.commit()
+            flash('Stat Updated!')
+            return render_template('activity_data.html', stat=stat, form=form, a=a)
+        else:
+            s = Stat(user_id = current_user.id,
+                     activity_id = a.id,
+                     ammount = form.ammount.data,
+                     time = form.date.data)
+            db.session.add(s)
+            db.session.commit()
+            flash('Stats Updated!')
+            return render_template('activity_data.html', a=a, form=form, stat=stat)
+    else:
+        flash_errors(form)
+    return render_template('activity_data.html', stat=stat, a=a, form=form)
+
+
+@activities.route('/activity/<int:id>/stats/
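
The API blueprint registered at `url_prefix='/api/v1'` in `create_app()` is not included in the hunks above, so its exact routes and JSON shapes are not visible here. As a rough sketch of how a client might exercise it, assuming the blueprint follows the endpoint spec from the original README (HTTP Basic Auth, a `/stats` listing, per-day data under `/stats/{id}/data`) and reuses the model's `ammount`/`time` field names -- all of which are assumptions, not confirmed by this diff:

```python
# Hypothetical client sketch: the API blueprint is not part of this diff, so the
# endpoint paths and JSON field names below are assumptions based on the README
# spec (HTTP Basic Auth, /stats endpoints) and the url_prefix='/api/v1' registration.
import requests

BASE = "http://localhost:5000/api/v1"      # local dev server (assumed host/port)
AUTH = ("user@example.com", "password")    # HTTP Basic Auth credentials (example)

# List the stats/activities being tracked.
resp = requests.get(BASE + "/stats", auth=AUTH)
resp.raise_for_status()
print(resp.json())

# Record (or overwrite) a value for one day; field names mirror the Stat model
# ("ammount", "time") but are not confirmed anywhere in the diff.
payload = {"time": "2015-03-01", "ammount": 8}
resp = requests.post(BASE + "/stats/1/data", json=payload, auth=AUTH)
print(resp.status_code)
```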