Skip to content

Commit

Permalink
Switching to node
Browse files Browse the repository at this point in the history
  • Loading branch information
corradio committed Sep 14, 2016
1 parent aa4c0f8 commit b591480
Show file tree
Hide file tree
Showing 561 changed files with 161 additions and 223 deletions.
7 changes: 0 additions & 7 deletions .dockerignore

This file was deleted.

22 changes: 1 addition & 21 deletions Makefile
Original file line number Diff line number Diff line change
@@ -1,26 +1,6 @@
.PHONY: server

install: install-grib2json
pip install -r requirements.txt
pip install honcho==0.7.1

install-grib2json:
git clone https://github.com/cambecc/grib2json
cd grib2json && mvn package && tar -xvf target/grib2json-0.8.0-SNAPSHOT.tar.gz

server:
honcho start

api:
python -u server.py

feeder:
python -u backend/feeder.py

db:
mongod

publish-gh-pages:
git subtree split --prefix public -b gh-pages
git subtree split --prefix static -b gh-pages
git push -f origin gh-pages:gh-pages
git branch -D gh-pages
2 changes: 0 additions & 2 deletions Procfile

This file was deleted.

16 changes: 1 addition & 15 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -70,20 +70,6 @@ You can also see a list of missing information displayed as warnings in the dev

![image](https://cloud.githubusercontent.com/assets/1655848/16256617/9c5872fc-3853-11e6-8c84-f562679086f3.png)

To get started, clone or [fork](https://help.github.com/articles/fork-a-repo/) the repository, and install all requirements:

```
Make install
```

You might need to install the [GRIB API](https://software.ecmwf.int/wiki/display/GRIB/GRIB+API+CMake+installation). On Mac OS, you can simply `brew install grib-api`.


Provided you have mongodb installed and running, you can run the full system using
```
Make server
```

If you have Docker, you can just run `docker-compose up` instead. Head over to [http://localhost:8000/](http://localhost:8000/) and you should see the map!
To get started, clone or [fork](https://help.github.com/articles/fork-a-repo/) the repository, and install [Docker](https://docs.docker.com/engine/installation/). Then you can just run `docker-compose up`. Head over to [http://localhost:8000/](http://localhost:8000/) and you should see the map!

Once you're done doing your changes, submit a [pull request](https://help.github.com/articles/using-pull-requests/) to get them integrated.
1 change: 1 addition & 0 deletions api/.dockerignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
node_modules
10 changes: 10 additions & 0 deletions api/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# API container: serves the map, forecast files, and production data (server.js).
FROM node:4.5.0
WORKDIR /home
# HTTP port the server listens on (http.listen(8000) in server.js).
EXPOSE 8000
# Add the topology file and package.json before the rest of the sources so the
# `npm install` layer is cached when only application code changes.
ADD ./data/europe.topo.json /home/data/europe.topo.json
ADD package.json /home/package.json
RUN npm install
ADD . /home
# NOTE(review): the server file added in this commit is server.js and the
# package.json dev script runs server.js — confirm index.js exists, or this
# CMD should likely be `node server.js`.
CMD node index.js
# Healthy as long as the HTTP server answers on port 8000.
HEALTHCHECK CMD curl --fail http://localhost:8000/ || exit 1

16 changes: 16 additions & 0 deletions api/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
{
"name": "electricitymap-api",
"version": "0.0.1",
"description": "",
"dependencies": {
"express": "^4.10.2",
"mongodb": "^2.2.9",
"node-statsd": "^0.1.1"
},
"devDependencies": {
"nodemon": "^1.10.2"
},
"scripts": {
"dev": "nodemon server.js"
}
}
83 changes: 83 additions & 0 deletions api/server.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
// ElectricityMap API server.
// Serves static assets, gzipped wind/solar forecast files, and the most
// recent per-country electricity production document from MongoDB.
// Emits StatsD counters/timers for each route.
var express = require('express');
var app = express();
var http = require('http').Server(app);
var statsd = require('node-statsd');
var MongoClient = require('mongodb').MongoClient;

// * Database
// Assigned once the connection succeeds. The HTTP server only starts
// listening after connect, so route handlers never see it undefined.
var mongoCollection;
MongoClient.connect(process.env['MONGO_URL'], function(err, db) {
    if (err) throw (err);
    console.log('Connected to database');
    mongoCollection = db.collection('realtime');
    // Create the indexes used by the /v1/production aggregation and the
    // /health sort. Each spec must wrap its keys in a `key` field per the
    // driver's createIndexes API (raw key documents are rejected).
    mongoCollection.createIndexes(
        [
            { key: { datetime: -1 } },
            { key: { countryCode: 1 } },
            { key: { datetime: -1, countryCode: 1 } }
        ],
        null,
        function (err, indexName) {
            if (err) console.error(err);
            else console.log('Database indexes created');
        }
    );
    http.listen(8000, function() {
        console.log('Listening on *:8000');
    });
});

// * Metrics
var statsdClient = new statsd.StatsD();
// Fixed: was `statsdClient.post`, a typo which left the port at its default
// and added a useless `post` property.
statsdClient.port = 8125;
statsdClient.host = process.env['STATSD_HOST'];
// FIXME: 'electricymap_api' is missing a 't' ("electricitymap"). Kept as-is
// because dashboards may already reference this prefix; rename in a
// coordinated change.
statsdClient.prefix = 'electricymap_api';

// * Routes
app.use(express.static('static'));
app.use(express.static('vendor'));
// The forecast files are pre-gzipped on disk by the feeder; advertise the
// encoding so clients decompress transparently.
app.get('/v1/wind', function(req, res) {
    statsdClient.increment('wind_GET');
    res.header('Content-Encoding', 'gzip');
    res.sendFile(__dirname + '/data/wind.json.gz');
});
app.get('/v1/solar', function(req, res) {
    statsdClient.increment('solar_GET');
    res.header('Content-Encoding', 'gzip');
    res.sendFile(__dirname + '/data/solar.json.gz');
});
// Latest document per country: sort newest-first, then take the first
// document of each countryCode group.
app.get('/v1/production', function(req, res) {
    statsdClient.increment('production_GET');
    var t0 = new Date().getTime();
    mongoCollection.aggregate([
        {'$sort': {'countryCode': 1, 'datetime': -1}},
        {'$group': {'_id': '$countryCode', 'lastDocument': {'$first': '$$CURRENT'}}}
    ], function (err, result) {
        if (err) {
            statsdClient.increment('production_GET_ERROR');
            console.error(err);
            res.status(500).json({error: 'Unknown database error'});
        } else {
            // `var` added: this previously leaked an implicit global shared
            // across concurrent requests.
            var obj = {};
            result.forEach(function(d) { obj[d['_id']] = d.lastDocument; });
            res.json({status: 'ok', data: obj});
            statsdClient.timing('production', new Date().getTime() - t0);
        }
    });
});
// Healthy iff the newest measurement is fresher than 30 minutes.
app.get('/health', function(req, res) {
    statsdClient.increment('health_GET');
    var EXPIRATION_SECONDS = 30 * 60.0;
    mongoCollection.findOne({}, {sort: [['datetime', -1]]}, function (err, doc) {
        if (err) {
            console.error(err);
            res.status(500).json({error: 'Unknown database error'});
        } else if (!doc ||
                   new Date().getTime() - new Date(doc.datetime).getTime() > EXPIRATION_SECONDS * 1000.0) {
            // `!doc` guard: an empty collection previously crashed here
            // reading `doc.datetime` of null instead of reporting unhealthy.
            res.status(500).json({error: 'Database is empty or last measurement is too old'});
        } else {
            res.json({status: 'ok'});
        }
    });
});
4 changes: 0 additions & 4 deletions common.yml

This file was deleted.

21 changes: 13 additions & 8 deletions docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,16 +1,21 @@
version: '2'
services:
web:
extends:
file: common.yml
service: web
api:
build: api
command: ./node_modules/.bin/nodemon server.js
depends_on: [mongo]
environment: [ENV=development, 'MONGO_URL=mongodb://mongo:27017/electricity']
ports: ['8000:8000']
volumes:
- './public:/home/public'
- './backend:/home/backend'
- './server.py:/home/server.py'
environment: [ENV=development, 'MONGO_URL=mongodb://mongo:27017']
- './static:/home/static'
- './api/server.js:/home/server.js'
- './api/data:/home/data'
feeder:
build: feeder
depends_on: [mongo]
environment: [ENV=development, 'MONGO_URL=mongodb://mongo:27017/electricity']
volumes:
- './api/data:/home/data'
mongo:
image: mongo
volumes: ['./mongodata/:/data/db']
19 changes: 19 additions & 0 deletions feeder/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
# Feeder container: Java base image (grib2json needs a JRE) with Python 2
# installed on top for the parsers in feeder.py.
FROM java:8
WORKDIR /home/
# Force UTF-8 so printing non-ASCII data under Python 2 doesn't crash.
ENV LANG=en_US.UTF-8
ENV PYTHONIOENCODING=utf8
RUN apt-get update
# Install python
RUN apt-get install -y gcc make python-dev libxml2-dev libxslt-dev gzip
RUN curl -s https://bootstrap.pypa.io/get-pip.py | python
# Install pygrib dependencies manually
RUN apt-get install -y maven
RUN apt-get install -y libgrib-api-dev && pip install numpy==1.10.1 pyproj==1.9.4
# Build grib2json, the GRIB2 -> JSON converter invoked by the wind parser.
RUN git clone https://github.com/cambecc/grib2json && \
cd grib2json && mvn package && tar -xvf target/grib2json-0.8.0-SNAPSHOT.tar.gz
# Only add requirements to enable cached builds when it is unchanged
ADD requirements.txt /home/requirements.txt
RUN pip install -r requirements.txt
# Add the rest
ADD . /home/
# -u: unbuffered stdout so logs stream to `docker logs` immediately.
CMD python -u feeder.py
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
2 changes: 1 addition & 1 deletion backend/parsers/solar.py → feeder/parsers/solar.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ def fetch_solar():
'forecasts': [obj_before, obj_after]
}

with gzip.open('public/data/solar.json.gz', 'w') as f:
with gzip.open('data/solar.json.gz', 'w') as f:
json.dump(obj, f)
print 'Done'

Expand Down
10 changes: 5 additions & 5 deletions backend/parsers/wind.py → feeder/parsers/wind.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ def fetch_wind():
'-Xmx512M',
'-jar', 'grib2json/grib2json-0.8.0-SNAPSHOT/lib/grib2json-0.8.0-SNAPSHOT.jar',
'-d', '-n', '-c', '-o',
'public/data/wind_before.json', 'wind.grb2'], shell=False)
'data/wind_before.json', 'wind.grb2'], shell=False)

# Fetch the forecast after
_ = fetch_forecast(origin, horizon.replace(hours=+MULTIPLE_HORIZON))
Expand All @@ -59,11 +59,11 @@ def fetch_wind():
'-Xmx512M',
'-jar', 'grib2json/grib2json-0.8.0-SNAPSHOT/lib/grib2json-0.8.0-SNAPSHOT.jar',
'-d', '-n', '-c', '-o',
'public/data/wind_after.json', 'wind.grb2'], shell=False)
'data/wind_after.json', 'wind.grb2'], shell=False)

with open('public/data/wind_before.json') as f_before, \
open('public/data/wind_after.json') as f_after, \
gzip.open('public/data/wind.json.gz', 'w') as f_out:
with open('data/wind_before.json') as f_before, \
open('data/wind_after.json') as f_after, \
gzip.open('data/wind.json.gz', 'w') as f_out:
obj = {
'forecasts': [json.load(f_before), json.load(f_after)]
}
Expand Down
3 changes: 0 additions & 3 deletions requirements.txt → feeder/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,9 +1,6 @@
arrow==0.5.0
Flask==0.11
Flask-Cors==2.1.2
pygrib==2.0.1
pymongo==3.2.2
requests==2.10.0
schedule==0.3.2
-e git+https://github.com/gaelenh/python-statsd-client.git#egg=statsd-client
uwsgi
16 changes: 6 additions & 10 deletions production.yaml
Original file line number Diff line number Diff line change
@@ -1,30 +1,26 @@
version: '2'
services:
web:
extends:
file: common.yml
service: web
api:
build: api
environment:
- ENV=production
- MONGO_URL=mongodb://mongo:27017
- MONGO_URL=mongodb://mongo:27017/electricity
- STATSD_HOST=grafana-graphite-statsd.infrastructure_default
- VIRTUAL_HOST=electricitymap-api.tmrow.co
env_file: ./mailgun.env
depends_on: [mongo]
networks: [default, infrastructure] # required to be able to com' with statsd & nginx
command: uwsgi --ini uwsgi.ini
volumes: ['/home/shared/electricitymap/data:/home/public/data']
volumes: ['/home/shared/electricitymap/data:/home/data']
feeder:
image: electricitymap_web
build: feeder
environment:
- ENV=production
- MONGO_URL=mongodb://mongo:27017
- STATSD_HOST=grafana-graphite-statsd.infrastructure_default
env_file: ./mailgun.env
depends_on: [mongo]
networks: [default, infrastructure] # required to be able to com' with statsd
command: python -u backend/feeder.py
volumes: ['/home/shared/electricitymap/data:/home/public/data']
volumes: ['/home/shared/electricitymap/data:/home/data']
mongo:
image: mongo
volumes: ['/home/shared/electricitymap/mongodata:/data/db']
Expand Down
Loading

0 comments on commit b591480

Please sign in to comment.