This repository has been archived by the owner on Feb 8, 2018. It is now read-only.

Commit

basic first 'runnable' version
 - ORM
 - migrations
 - upload
 - some read only resources
hoegaarden committed Jun 24, 2013
1 parent c29c84a commit 892b727
Showing 12 changed files with 535 additions and 24 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -1,3 +1,4 @@
node_modules
*~
npm-debug.log
database.json
55 changes: 55 additions & 0 deletions README.md
@@ -0,0 +1,55 @@
dustmap-server
=

Install / Run
-
git clone git://github.com/dustmap/dustmap-server
cd dustmap-server
cp database.json.example database.json
$EDITOR database.json
export NODE_ENV=<the_environment>
node_modules/.bin/db-migrate --env=$NODE_ENV up
npm start

What is this?
-
This is the server component for the dustmap.org project. The project's goals are to

- develop and deploy el-cheapo hardware (Raspberry Pi, Arduino, ...) to measure environmental data (especially particulate matter)
- develop software for measuring, transferring, storing and visualizing the recorded data

Every outcome of the project should be as open as possible (read: open data, open source software, open hardware, ...)

If you are interested in any of the stuff we (try to) do, feel free to drop us a line at [email protected] ...

Hints
-
- Tests are currently not working; they should come back soon
- You need to set up the connection to your database in `database.json` in the project root
- You can set up multiple database connections for different `NODE_ENV` values; check out `database.json.example`
- We currently use `hstore`. This may change in the future (we might switch to the `JSON` datatype as soon as PostgreSQL 9.3 is out ... or not)
- Currently only the upload works ... more coming
- No auth/signing/.../whatsoever yet ... coming in the future
- If you need SSL to connect to the database, it seems you have to manually edit the require stanzas for both `node-orm2` and `db-migrate` so that they use the native `libpq` bindings (see the sketch below)
  - `... require('pg') ...` -> `require('pg').native ...`
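
For illustration, the edit the last hint refers to looks roughly like this; the exact files to patch inside `node-orm2` and `db-migrate` vary by version, so treat the surrounding code as an assumption:

```js
// Sketch only: somewhere inside the postgres driver of node-orm2 / db-migrate
// the `pg` module is required. Swapping in the native libpq bindings is the
// change the hint above describes.

// before (pure JS client):
var pg = require('pg');

// after (native libpq bindings):
pg = require('pg').native;

// everything else stays the same, e.g. (placeholder credentials from database.json.example):
var client = new pg.Client({
  user: 'my username', password: 'my password',
  host: 'the db host', port: 5432, database: 'the database',
  ssl: true
});
client.connect(function (err) {
  if (err) throw err;
  client.query('select 1', function (err, res) {
    client.end();
  });
});
```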

More or less relevant links
-
- http://dustmap.org/
- https://github.com/dustmap/
- https://github.com/hoegaarden/dustnode-dustpi-gpio

TODO
-
- [x] ORM, DB, ...
- [x] Ratelimit, Upload Limit
- [ ] Validation in application
- [ ] JSON HAL enhancements
- [ ] Caching DB requests, E-Tags, ...
- [ ] perhaps patch `pg` to switch between native and JS mode via an environment variable (need to look into this ...)
- [ ] update / delete resources (?)
- [ ] user / node management
- [ ] static stuff for a single-page application (?)
- [ ] `res.format(...)` and other response formats
- [ ] daemonizing the server

30 changes: 30 additions & 0 deletions database.json.example
@@ -0,0 +1,30 @@
{
"dev" : {
"user" : "my username" ,
"password" : "my password" ,
"host" : "the db host" ,
"port" : 5432 ,
"database" : "the database" ,
"driver" : "postgres" ,
"protocol" : "postgres" ,
"query" : {
"ssl" : true ,
"debug" : true
}
}

, "other environment" : {
"user" : "my username" ,
"password" : "my password" ,
"host" : "the db host" ,
"port" : 5432 ,
"database" : "the database" ,
"driver" : "postgres -- this is needed by db-migration" ,
"protocol" : "postgres -- this is needed by node-orm2" ,
"query" : {
"ssl" : true ,
"debug" : true
}

}
}
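
For reference, a minimal sketch (mirroring what `models/index.js` further down does) of how a connection block is picked; `"other environment"` above is just a placeholder key for a second environment:

```js
// Pick the connection block matching NODE_ENV, defaulting to "dev".
var coninfo = require('./database.json')[process.env.NODE_ENV || 'dev'];

console.log('will connect to', coninfo.host + ':' + coninfo.port, 'as', coninfo.user);
```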
22 changes: 13 additions & 9 deletions index.js
@@ -2,11 +2,11 @@ var express = require('express')
, app = express()
, rate = require('express-rate')
, routes = require('./routes')
, orm = require('./models')
;
module.exports = app;



app.configure('production', function(){
app.use(express.logger());
});
@@ -19,16 +19,20 @@ app.configure(function(){
interval : 1
, limit : 10
}));
app.use(express.favicon());
app.use(express.bodyParser());
});

orm(function(err, db){
if (err)
throw err;

routes(app);

routes(app, db);

if (!module.parent) {
var port = process.env.PORT || 3000;
app.listen(port, function(){
console.log('listening on port', port);
});
}
if (!module.parent) {
var port = process.env.PORT || 3000;
app.listen(port, function(){
console.log('listening on port', port);
});
}
})
113 changes: 113 additions & 0 deletions migrations/20130613213125-init.js
@@ -0,0 +1,113 @@
var dbm = require('db-migrate');
var here = require('here').here;
var async = require('async');

exports.up = function(db, cb) {
var stmt = here(/*
create table nodes (
id serial not null
, name text not null
, owner text not null -- just text for now
, primary key ( id )
);
create table uploads (
id serial not null
, node integer not null
references nodes(id)
, ts timestamp without time zone not null default now() -- all UTC
, primary key ( id )
, unique ( node , ts )
);
create table measurements (
id serial not null
, upload integer not null
references uploads( id )
, data hstore not null
, primary key ( id )
, unique ( upload, data )
, constraint "data needs a value"
check ( data ? 'value' )
, constraint "data value must be numeric"
check ( (data->'value')::numeric is not null )
, constraint "data needs a type"
check ( data ? 'type' )
);
create index on measurements ( data );
create view node_uploads as
select
nodes.id as node_id
, nodes.name as node_name
, uploads.ts as ts
, measurements.data as data
from
nodes
left join uploads on nodes.id = uploads.node
left join measurements on uploads.id = measurements.upload
where
data is not null
;
create or replace function changeNodeUploads()
returns trigger
language plpgsql as $code$
declare
node_id nodes.id%TYPE;
upload_id uploads.id%TYPE;
begin
if TG_OP = 'INSERT' then
-- get node_id, perhaps insert a node beforehand
if not exists (select 1 from nodes where name = NEW.node_name) then
insert into nodes(name, owner) values(NEW.node_name, 'unknown')
returning id into node_id;
else
select id into node_id from nodes where name = NEW.node_name;
end if;
-- get the upload_id, perhaps create the upload beforehand
if not exists (select 1 from uploads where ts = NEW.ts and node = node_id) then
insert into uploads(ts, node) values(NEW.ts, node_id)
returning id into upload_id;
else
select id into upload_id from uploads where node = node_id and ts = NEW.ts;
end if;
-- finally, save the measurement
insert into measurements(upload, data) values(upload_id, NEW.data);
return NEW;
elsif TG_OP = 'UPDATE' then
raise exception 'Update not allowed, update the underlying tables';
return NEW;
elsif TG_OP = 'DELETE' then
raise exception 'Delete not allowed, delete from the underlying tables';
return NEW;
end if;
return NEW;
end;
$code$;
create trigger changeNodeUploads_trg
instead of INSERT or UPDATE or DELETE
on node_uploads
for each row execute procedure changeNodeUploads()
;
*/).valueOf();

db.runSql(stmt, cb);
};

exports.down = function(db, cb) {
async.series([
db.runSql.bind(db, 'drop view node_uploads')
, db.runSql.bind(db, 'drop function changeNodeUploads()')
, db.dropTable.bind(db, 'measurements')
, db.dropTable.bind(db, 'uploads')
, db.dropTable.bind(db, 'nodes')
], cb);
}
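
A hedged usage sketch of what the `node_uploads` view plus its INSTEAD OF trigger provides: a single insert into the view creates the node, the upload and the measurement rows as needed. The `NodeUpload` model used here is defined in `models/NodeUpload.js` below; the concrete values are made up.

```js
// Sketch only, assuming the ORM setup from models/index.js (see index.js for the real wiring).
var models = require('./models');

models(function (err, db) {
  if (err) throw err;

  // One row into the view; the INSTEAD OF trigger above fans it out into
  // nodes, uploads and measurements, creating node/upload rows if missing.
  db.models.NodeUpload.create({
    node_name : 'balcony-1',                  // hypothetical node name
    ts        : new Date(),                   // upload timestamp (stored without time zone, all UTC)
    data      : { type: 'pm10', value: 42 }   // must carry 'type' and a numeric 'value' (see constraints above)
  }, function (err, row) {
    if (err) throw err;
    console.log('stored a measurement for node', row.node_name);
  });
});
```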
31 changes: 31 additions & 0 deletions models/Measurement.js
@@ -0,0 +1,31 @@
var hstore = require('pg-hstore');

function hstore2json() {
if (typeof this.data === 'string')
this.data = hstore.parse(this.data);
}

function json2hstore(next) {
this.data = hstore.stringify(this.data);
next();
}

module.exports = function(db, cb){
var Measurement = db.define('Measurement', {
data : { type: 'text', required: true }
},{
table : 'measurements'
, hooks : {
afterLoad: hstore2json ,
beforeSave: json2hstore ,
afterSave: hstore2json
}
});

Measurement.hasOne('upload', db.models.Upload, {
required : true
, reverse : 'measurement'
})

return cb();
}
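
A small illustration of what the hooks above do (values are invented; this assumes the same synchronous `pg-hstore` API the model uses -- newer releases of the module use a callback style):

```js
var hstore = require('pg-hstore');

// beforeSave: plain object -> hstore text for the `data` column
var asText = hstore.stringify({ type: 'pm10', value: 42 });
// roughly: "type"=>"pm10", "value"=>"42"

// afterLoad / afterSave: hstore text -> plain object again
var asObject = hstore.parse(asText);   // note: hstore values come back as strings
```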
17 changes: 17 additions & 0 deletions models/Node.js
@@ -0,0 +1,17 @@
module.exports = function(db, cb){
var Node = db.define('Node', {
name : { type: 'text', required: true }
, owner : { type: 'text', required: true }
},{
table : 'nodes'
});

/*
Node.hasMany('uploads', db.models.Upload, {}, {
// required: true
reverse: 'node'
});
*/

return cb();
}
29 changes: 29 additions & 0 deletions models/NodeUpload.js
@@ -0,0 +1,29 @@
var hstore = require('pg-hstore');

var hstore2json = function() {
if (typeof this.data === 'string')
this.data = hstore.parse(this.data);
}

var json2hstore = function(next) {
this.data = hstore.stringify(this.data);
return next();
}

module.exports = function(db, cb){
var NodeUpload = db.define('NodeUpload', {
node_name : { type: 'text', required: true }
, ts : { type: 'date', required: true }
, data : { type: 'binary', required: true }
},{
table : 'node_uploads'
, id : 'node_id'
, hooks : {
afterLoad: hstore2json ,
beforeSave: json2hstore ,
afterSave: hstore2json
}
});

return cb();
}
15 changes: 15 additions & 0 deletions models/Upload.js
@@ -0,0 +1,15 @@
module.exports = function(db, cb){
var Upload = db.define('Upload', {
ts : { type: 'date', required: true }
// , node : { type: 'number', required: true, rational: false, unsigned: true }
},{
table : 'uploads'
});

Upload.hasOne('node', db.models.Node, {
required : true
, reverse : 'uploads'
});

return cb();
}
46 changes: 46 additions & 0 deletions models/index.js
@@ -0,0 +1,46 @@
var orm = require('orm')
, coninfo = require('../database.json')[ process.env.NODE_ENV || 'dev' ]
, transaction = require("orm-transaction")
;

/*
* we need to load the models in the right order to make sure, we can setup
* relations (hasOne, ...) between them
*/
var files = [
'Node'
, 'Upload'
, 'Measurement'
, 'NodeUpload'
];

module.exports = function(cb, con) {
con = con || coninfo;

return orm.connect(con, function(err, db){
if (err) return cb(err); // bail out if connecting to the database failed
db.use(transaction);
db.settings.set('properties.association_key', '{name}');
db.settings.set('instance.cache', false);

var loaded = 0;
var errors = [];

files.forEach(function(file) {
db.load(file, function(err) {
loaded++;

if (err) {
console.error('Error loading model', file, err);
errors.push(err);
}

if (loaded === files.length) {
cb( errors.length ? errors : null, db );
}
});
});
});
}

// db.settings.set("properties.primary_key", "id");
// require('./validator').attach(db);