From 74285708c31f9b42a9cf98619ed7b2f627d77789 Mon Sep 17 00:00:00 2001
From: Olivier Corradi
Date: Fri, 3 Mar 2017 15:23:08 +0100
Subject: [PATCH] Cleanup in progress

---
 .dockerignore                          |   2 -
 .gitignore                             |   3 -
 docker-compose.yml                     |  22 --
 feeder/Dockerfile                      |  18 -
 feeder/feeder.py                       | 483 ------------------------
 feeder/migrate_db.py                   |  44 ---
 feeder/package.json                    |  21 --
 feeder/parsers/__init__.py             |   0
 feeder/push_cache.js                   |  90 -----
 feeder/requirements.txt                |  12 -
 {feeder/parsers => parsers}/ENTSOE.py  |   0
 {feeder/parsers => parsers}/FR.py      |   0
 {feeder/parsers => parsers}/IS.py      |   0
 {feeder => parsers}/__init__.py        |   0
 {feeder/parsers => parsers}/weather.py |   0
 production_gce.yml                     |  20 -
 web/app/main.js                        |  15 +-
 web/package.json                       |   4 -
 web/server.js                          | 398 +-------------------
 19 files changed, 25 insertions(+), 1107 deletions(-)
 delete mode 100644 feeder/Dockerfile
 delete mode 100644 feeder/feeder.py
 delete mode 100644 feeder/migrate_db.py
 delete mode 100644 feeder/package.json
 delete mode 100644 feeder/parsers/__init__.py
 delete mode 100644 feeder/push_cache.js
 delete mode 100644 feeder/requirements.txt
 rename {feeder/parsers => parsers}/ENTSOE.py (100%)
 rename {feeder/parsers => parsers}/FR.py (100%)
 rename {feeder/parsers => parsers}/IS.py (100%)
 rename {feeder => parsers}/__init__.py (100%)
 rename {feeder/parsers => parsers}/weather.py (100%)

diff --git a/.dockerignore b/.dockerignore
index 8029661c66..c1225fa5a5 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -6,8 +6,6 @@
 datascience
-feeder/node_modules
-
 web/node_modules
 web/build
 web/public/dist
diff --git a/.gitignore b/.gitignore
index 205eac9833..e2af10a71e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,9 +1,6 @@
 *.pyc
 *.env
-mongodata
 src
-grib2json
-feeder/node_modules
 web/public/dist
 web/node_modules
 .ipynb_checkpoints/
diff --git a/docker-compose.yml b/docker-compose.yml
index cf59d1931c..adbe10ece3 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -5,14 +5,10 @@ services:
   web:
     build:
       context: .
       dockerfile: web/Dockerfile
     command: npm run server-dev
-    depends_on: [mongo, memcached]
     environment:
       - ENV=development
-      - MEMCACHED_HOST=memcached
-      - 'MONGO_URL=mongodb://mongo:27017/electricity'
     ports: ['8000:8000']
     volumes:
-      - './shared:/home/shared'
       - './web/app:/home/web/app'
       - './web/package.json:/home/web/package.json'
       - './web/public:/home/web/public'
@@ -20,21 +16,3 @@
       - './web/views:/home/web/views'
       - './web/webpack.config.js:/home/web/webpack.config.js'
      - './web/locales:/home/web/locales'
-  feeder:
-    build:
-      context: .
-      dockerfile: feeder/Dockerfile
-    depends_on: [mongo]
-    env_file: ./secrets.env
-    environment:
-      - ENV=development
-      - MEMCACHED_HOST=memcached
-      - 'MONGO_URL=mongodb://mongo:27017/electricity'
-    volumes:
-      - './feeder:/home/feeder'
-      - './shared:/home/shared'
-  memcached:
-    image: memcached
-  mongo:
-    image: mongo
-    volumes: ['/data/db']
diff --git a/feeder/Dockerfile b/feeder/Dockerfile
deleted file mode 100644
index 1724ae527b..0000000000
--- a/feeder/Dockerfile
+++ /dev/null
@@ -1,18 +0,0 @@
-FROM python:2.7
-# Install node
-RUN curl -sL https://deb.nodesource.com/setup_6.x | bash - && \
-    apt-get install -y nodejs
-# Install pygrib dependencies manually
-RUN apt-get install -y libgrib-api-dev libsnappy-dev && \
-    pip install numpy==1.10.1 pyproj==1.9.4
-WORKDIR /home/feeder
-# Only add requirements to enable cached builds when it is unchanged
-ADD feeder/requirements.txt /home/feeder/requirements.txt
-RUN pip install -r requirements.txt
-# Same with package.json
-ADD feeder/package.json /home/feeder/package.json
-RUN npm install
-# Add the rest
-ADD feeder /home/feeder
-ADD shared /home/shared
-CMD python -u feeder.py
diff --git a/feeder/feeder.py b/feeder/feeder.py
deleted file mode 100644
index 1f9e469f42..0000000000
--- a/feeder/feeder.py
+++ /dev/null
@@ -1,483 +0,0 @@
-import arrow
-import glob
-import pymongo
-import json, logging, os, schedule, subprocess, time
-import requests
-import snappy
-
-from bson.binary import Binary
-from pymemcache.client.base import Client
-
-from parsers import IS
-from parsers import FR
-from parsers import ENTSOE
-from parsers import weather
-from migrate_db import migrate
-
-INTERVAL_SECONDS = 60 * 5
-
-# Set up error handling
-import opbeat
-from opbeat.handlers.logging import OpbeatHandler
-if 'OPBEAT_SECRET' in os.environ:
-    opbeat_client = opbeat.Client(
-        app_id='c36849e44e',
-        organization_id='093c53b0da9d43c4976cd0737fe0f2b1',
-        secret_token=os.environ['OPBEAT_SECRET'])
-else:
-    opbeat_client = None
-    print "No Opbeat Token found! Opbeat Error handling is inactive."
-
-# Set up logging
-ENV = os.environ.get('ENV', 'development').lower()
-logger = logging.getLogger(__name__)
-stdout_handler = logging.StreamHandler()
-logger.addHandler(stdout_handler)
-if not ENV == 'development':
-    logger.setLevel(logging.INFO)
-    # Add opbeat
-    if opbeat_client:
-        opbeat_handler = OpbeatHandler(opbeat_client)
-        opbeat_handler.setLevel(logging.WARN)
-        logger.addHandler(opbeat_handler)
-    # Add email
-    from logging.handlers import SMTPHandler
-    smtp_handler = SMTPHandler(
-        mailhost=('smtp.mailgun.org', 587),
-        fromaddr='Application Bug Reporter ',
-        toaddrs=['olivier.corradi@gmail.com'],
-        subject='Electricity Map Feeder Error',
-        credentials=(os.environ.get('MAILGUN_USER'), os.environ.get('MAILGUN_PASSWORD'))
-    )
-    smtp_handler.setLevel(logging.WARN)
-    logger.addHandler(smtp_handler)
-else:
-    logger.setLevel(logging.DEBUG)
-
-logger.info('Feeder is starting..')
-
-# Define all production parsers
-CONSUMPTION_PARSERS = {
-    'AT': ENTSOE.fetch_consumption,
-    'BE': ENTSOE.fetch_consumption,
-    'BG': ENTSOE.fetch_consumption,
-    'CH': ENTSOE.fetch_consumption,
-    'CZ': ENTSOE.fetch_consumption,
-    'DE': ENTSOE.fetch_consumption,
-    'DK': ENTSOE.fetch_consumption,
-    'EE': ENTSOE.fetch_consumption,
-    'ES': ENTSOE.fetch_consumption,
-    'FI': ENTSOE.fetch_consumption,
-    # 'FR': FR.fetch_consumption,
-    'GB': ENTSOE.fetch_consumption,
-    'GB-NIR': ENTSOE.fetch_consumption,
-    'GR': ENTSOE.fetch_consumption,
-    'HU': ENTSOE.fetch_consumption,
-    'IE': ENTSOE.fetch_consumption,
-    'IT': ENTSOE.fetch_consumption,
-    'LT': ENTSOE.fetch_consumption,
-    'LU': ENTSOE.fetch_consumption,
-    'LV': ENTSOE.fetch_consumption,
-    'ME': ENTSOE.fetch_consumption,
-    'NL': ENTSOE.fetch_consumption,
-    'NO': ENTSOE.fetch_consumption,
-    'PL': ENTSOE.fetch_consumption,
-    'PT': ENTSOE.fetch_consumption,
-    'RO': ENTSOE.fetch_consumption,
-    'RS': ENTSOE.fetch_consumption,
-    'SE': ENTSOE.fetch_consumption,
-    'SI': ENTSOE.fetch_consumption,
-    'SK': ENTSOE.fetch_consumption,
-}
-PRODUCTION_PARSERS = {
-    'AT': ENTSOE.fetch_production,
-    'BE': ENTSOE.fetch_production,
-    'BG': ENTSOE.fetch_production,
-    'CH': ENTSOE.fetch_production,
-    'CZ': ENTSOE.fetch_production,
-    'DE': ENTSOE.fetch_production,
-    'DK': ENTSOE.fetch_production,
-    'EE': ENTSOE.fetch_production,
-    'ES': ENTSOE.fetch_production,
-    'FI': ENTSOE.fetch_production,
-    'FR': FR.fetch_production,
-    'GB': ENTSOE.fetch_production,
-    'GB-NIR': ENTSOE.fetch_production,
-    'GR': ENTSOE.fetch_production,
-    'HU': ENTSOE.fetch_production,
-    'IE': ENTSOE.fetch_production,
-    'IS': IS.fetch_production,
-    'IT': ENTSOE.fetch_production,
-    'LT': ENTSOE.fetch_production,
-    'LU': ENTSOE.fetch_production,
-    'LV': ENTSOE.fetch_production,
-    'ME': ENTSOE.fetch_production,
-    'NL': ENTSOE.fetch_production,
-    'NO': ENTSOE.fetch_production,
-    'PL': ENTSOE.fetch_production,
-    'PT': ENTSOE.fetch_production,
-    'RO': ENTSOE.fetch_production,
-    'RS': ENTSOE.fetch_production,
-    'SE': ENTSOE.fetch_production,
-    'SI': ENTSOE.fetch_production,
-    'SK': ENTSOE.fetch_production,
-}
-# Keys are unique because both countries are sorted alphabetically
-EXCHANGE_PARSERS = {
-    # AL
-    'AL->GR': ENTSOE.fetch_exchange,
-    'AL->ME': ENTSOE.fetch_exchange,
-    'AL->RS': ENTSOE.fetch_exchange,
-    # AT
-    'AT->CH': ENTSOE.fetch_exchange,
-    'AT->CZ': ENTSOE.fetch_exchange,
-    'AT->DE': ENTSOE.fetch_exchange,
-    'AT->HU': ENTSOE.fetch_exchange,
-    'AT->IT': ENTSOE.fetch_exchange,
-    'AT->SI': ENTSOE.fetch_exchange,
-    # BA
-    'BA->ME': ENTSOE.fetch_exchange,
-    'BA->RS': ENTSOE.fetch_exchange,
-    # BE
-    'BE->FR': ENTSOE.fetch_exchange,
-    'BE->NL': ENTSOE.fetch_exchange,
-    # BG
-    'BG->GR': ENTSOE.fetch_exchange,
-    'BG->MK': ENTSOE.fetch_exchange,
-    'BG->RO': ENTSOE.fetch_exchange,
-    'BG->RS': ENTSOE.fetch_exchange,
-    'BG->TR': ENTSOE.fetch_exchange,
-    # BY
-    'BY->LT': ENTSOE.fetch_exchange,
-    # CH
-    'CH->DE': ENTSOE.fetch_exchange,
-    'CH->FR': ENTSOE.fetch_exchange,
-    'CH->IT': ENTSOE.fetch_exchange,
-    # CZ
-    'CZ->SK': ENTSOE.fetch_exchange,
-    'CZ->PL': ENTSOE.fetch_exchange,
-    'CZ->DE': ENTSOE.fetch_exchange,
-    # DE
-    'DE->DK': ENTSOE.fetch_exchange,
-    'DE->FR': ENTSOE.fetch_exchange,
-    'DE->PL': ENTSOE.fetch_exchange,
-    'DE->NL': ENTSOE.fetch_exchange,
-    'DE->SE': ENTSOE.fetch_exchange,
-    # DK
-    'DK->NO': ENTSOE.fetch_exchange,
-    'DK->SE': ENTSOE.fetch_exchange,
-    # EE
-    'EE->FI': ENTSOE.fetch_exchange,
-    'EE->LV': ENTSOE.fetch_exchange,
-    'EE->RU': ENTSOE.fetch_exchange,
-    # ES
-    'ES->FR': ENTSOE.fetch_exchange,
-    'ES->PT': ENTSOE.fetch_exchange,
-    # FI
-    'FI->NO': ENTSOE.fetch_exchange,
-    'FI->RU': ENTSOE.fetch_exchange,
-    'FI->SE': ENTSOE.fetch_exchange,
-    # FR
-    'FR->GB': ENTSOE.fetch_exchange,
-    'FR->IT': ENTSOE.fetch_exchange,
-    # GB
-    'GB->IE': ENTSOE.fetch_exchange,
-    'GB->GB-NIR': ENTSOE.fetch_exchange,
-    'GB->NL': ENTSOE.fetch_exchange,
-    # GB-NIR
-    'GB-NIR->IE': ENTSOE.fetch_exchange,
-    # GR
-    'GR->IT': ENTSOE.fetch_exchange,
-    'GR->MK': ENTSOE.fetch_exchange,
-    'GR->TR': ENTSOE.fetch_exchange,
-    # HR
-    'HR->HU': ENTSOE.fetch_exchange,
-    'HR->RS': ENTSOE.fetch_exchange,
-    # HU
-    'HU->RO': ENTSOE.fetch_exchange,
-    'HU->RS': ENTSOE.fetch_exchange,
-    'HU->SK': ENTSOE.fetch_exchange,
-    'HU->UA': ENTSOE.fetch_exchange,
-    # IT
-    'IT->MT': ENTSOE.fetch_exchange,
-    'IT->SI': ENTSOE.fetch_exchange,
-    # LT
-    'LT->LV': ENTSOE.fetch_exchange,
-    'LT->PL': ENTSOE.fetch_exchange,
-    'LT->RU': ENTSOE.fetch_exchange,
-    'LT->SE': ENTSOE.fetch_exchange,
-    # LV
-    'LV->RU': ENTSOE.fetch_exchange,
-    # ME
-    'ME->RS': ENTSOE.fetch_exchange,
-    # MD
-    # 'MD->RO': ENTSOE.fetch_exchange,
-    # MK
-    'MK->RS': ENTSOE.fetch_exchange,
-    # NL
-    'NL->NO': ENTSOE.fetch_exchange,
-    # NO
-    'NO->SE': ENTSOE.fetch_exchange,
-    # PL
-    'PL->SE': ENTSOE.fetch_exchange,
-    'PL->SK': ENTSOE.fetch_exchange,
-    'PL->UA': ENTSOE.fetch_exchange,
-    # RO
-    'RO->RS': ENTSOE.fetch_exchange,
-    'RO->UA': ENTSOE.fetch_exchange,
-    # SK
-    'SK->UA': ENTSOE.fetch_exchange,
-}
-
-PRICE_PARSERS = {
-    'AT': ENTSOE.fetch_price,
-    'BE': ENTSOE.fetch_price,
-    'BG': ENTSOE.fetch_price,
-    'CH': ENTSOE.fetch_price,
-    'CZ': ENTSOE.fetch_price,
-    'DE': ENTSOE.fetch_price,
-    'DK': ENTSOE.fetch_price,
-    'EE': ENTSOE.fetch_price,
-    'ES': ENTSOE.fetch_price,
-    'FI': ENTSOE.fetch_price,
-    'FR': FR.fetch_price,
-    'GB': ENTSOE.fetch_price,
-    'GB-NIR': ENTSOE.fetch_price,
-    'GR': ENTSOE.fetch_price,
-    'HU': ENTSOE.fetch_price,
-    'IE': ENTSOE.fetch_price,
-    'IT': ENTSOE.fetch_price,
-    'LT': ENTSOE.fetch_price,
-    'LU': ENTSOE.fetch_price,
-    'LV': ENTSOE.fetch_price,
-    'NL': ENTSOE.fetch_price,
-    'NO': ENTSOE.fetch_price,
-    'PL': ENTSOE.fetch_price,
-    'PT': ENTSOE.fetch_price,
-    'RO': ENTSOE.fetch_price,
-    'RS': ENTSOE.fetch_price,
-    'SE': ENTSOE.fetch_price,
-    'SI': ENTSOE.fetch_price,
-    'SK': ENTSOE.fetch_price,
-}
-
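All parsers registered above share one call signature: consumption, production and price parsers are invoked as parser(country_code, session), exchange parsers as parser(country_code1, country_code2, session), and the return value must satisfy the validators defined further down in feeder.py. A minimal sketch of the production contract (the country code and numbers are placeholders, not real data):

import arrow

def fetch_production(country_code='XX', session=None):
    # Shape implied by validate_production() below: countryCode must echo
    # the request, datetime must not be in the future, and either 'coal'
    # or 'unknown' must be present in the production dict.
    return {
        'countryCode': country_code,
        'datetime': arrow.now().datetime,
        'production': {
            'coal': 300.0,    # MW; placeholder values
            'wind': 120.0,
            'unknown': None,
        },
    }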
-# Set up database
-client = pymongo.MongoClient(os.environ.get('MONGO_URL', 'mongodb://localhost:27017'))
-db = client['electricity']
-col_consumption = db['consumption']
-col_gfs = db['gfs']
-col_production = db['production']
-col_exchange = db['exchange']
-col_price = db['price']
-# Set up indices
-col_consumption.create_index([('datetime', -1), ('countryCode', 1)], unique=True)
-col_gfs.create_index([('refTime', -1), ('targetTime', 1), ('key', 1)], unique=True)
-col_gfs.create_index([('refTime', -1), ('targetTime', -1), ('key', 1)], unique=True)
-col_production.create_index([('datetime', -1), ('countryCode', 1)], unique=True)
-col_exchange.create_index([('datetime', -1), ('sortedCountryCodes', 1)], unique=True)
-col_price.create_index([('datetime', -1), ('countryCode', 1)], unique=True)
-
-# Set up memcached
-MEMCACHED_HOST = os.environ.get('MEMCACHED_HOST', None)
-MEMCACHED_STATE_KEY = 'state'
-if not MEMCACHED_HOST:
-    logger.warn('MEMCACHED_HOST env variable was not found.. starting without cache!')
-    cache = None
-else:
-    cache = Client((MEMCACHED_HOST, 11211))
-
-# Set up requests
-session = requests.session()
-
-def validate_consumption(obj, country_code):
-    # Data quality check
-    if obj['consumption'] is not None and obj['consumption'] < 0:
-        raise ValueError('%s: consumption has negative value %s' % (country_code, obj['consumption']))
-
-def validate_production(obj, country_code):
-    if not 'datetime' in obj:
-        raise Exception('datetime was not returned for %s' % country_code)
-    if obj.get('countryCode', None) != country_code:
-        raise Exception("Country codes %s and %s don't match" % (obj.get('countryCode', None), country_code))
-    if arrow.get(obj['datetime']) > arrow.now():
-        raise Exception("Data from %s can't be in the future" % country_code)
-    if obj.get('production', {}).get('unknown', None) is None and \
-        obj.get('production', {}).get('coal', None) is None and \
-        country_code not in ['CH', 'NO']:
-        raise Exception("Coal or unknown production value is required for %s" % (country_code))
-    for k, v in obj['production'].iteritems():
-        if v is None: continue
-        if v < 0: raise ValueError('%s: key %s has negative value %s' % (country_code, k, v))
-
-def db_upsert(col, obj, database_key):
-    now = arrow.now().datetime
-    query = { database_key: obj[database_key], 'datetime': obj['datetime'] }
-    result = col.update_one(query, { '$set': obj }, upsert=True)
-    if result.modified_count:
-        logger.info('[%s] Updated %s @ %s' % (col.full_name, obj[database_key], obj['datetime']))
-        col.update_one(query, { '$set': { 'modifiedAt': now } })
-    elif result.matched_count:
-        logger.debug('[%s] Already up to date: %s @ %s' % (col.full_name, obj[database_key], obj['datetime']))
-    elif result.upserted_id:
-        logger.info('[%s] Inserted %s @ %s' % (col.full_name, obj[database_key], obj['datetime']))
-        col.update_one(query, { '$set': { 'createdAt': now } })
-    else:
-        raise Exception('Unknown database command result.')
-    return result
-
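db_upsert leans on the unique (datetime, countryCode) index created above: re-running a parser can never duplicate a data point, and the function stamps createdAt on first insert and modifiedAt on every change. A usage sketch (values hypothetical):

obj = {'countryCode': 'FR',
       'datetime': arrow.now().floor('hour').datetime,
       'consumption': 52000.0}
result = db_upsert(col_consumption, obj, 'countryCode')
# First call: result.upserted_id is set (inserted, createdAt stamped).
# Identical second call: result.matched_count only (no write).
# Changed payload: result.modified_count is set (modifiedAt stamped).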
-def fetch_consumptions():
-    for country_code, parser in CONSUMPTION_PARSERS.iteritems():
-        try:
-            obj = parser(country_code, session)
-            if not obj: continue
-            validate_consumption(obj, country_code)
-            # Database insert
-            result = db_upsert(col_consumption, obj, 'countryCode')
-            if (result.modified_count or result.upserted_id) and cache and ENV == 'development': cache.delete(MEMCACHED_STATE_KEY)
-        except:
-            logger.exception('Exception while fetching consumption of %s' % country_code)
-
-def fetch_productions():
-    for country_code, parser in PRODUCTION_PARSERS.iteritems():
-        try:
-            obj = parser(country_code, session)
-            if not obj: continue
-            if type(obj) != list: obj = [obj]
-            for item in obj:
-                if not item: continue
-                validate_production(item, country_code)
-                # Database insert
-                result = db_upsert(col_production, item, 'countryCode')
-                if (result.modified_count or result.upserted_id) and cache and ENV == 'development': cache.delete(MEMCACHED_STATE_KEY)
-        except:
-            logger.exception('Exception while fetching production of %s' % country_code)
-
-def fetch_exchanges():
-    for k, parser in EXCHANGE_PARSERS.iteritems():
-        try:
-            country_code1, country_code2 = k.split('->')
-            if sorted([country_code1, country_code2])[0] != country_code1:
-                raise Exception('Exchange key pair %s is not ordered alphabetically' % k)
-            obj = parser(country_code1, country_code2, session)
-            if not obj: continue
-            if type(obj) != list: obj = [obj]
-            for item in obj:
-                if item.get('sortedCountryCodes', None) != k:
-                    raise Exception("Sorted country codes %s and %s don't match" % (item.get('sortedCountryCodes', None), k))
-                if not 'datetime' in item:
-                    raise Exception('datetime was not returned for %s' % k)
-                if arrow.get(item['datetime']) > arrow.now():
-                    raise Exception("Data from %s can't be in the future" % k)
-                # Database insert
-                result = db_upsert(col_exchange, item, 'sortedCountryCodes')
-                if (result.modified_count or result.upserted_id) and cache and ENV == 'development': cache.delete(MEMCACHED_STATE_KEY)
-        except:
-            logger.exception('Exception while fetching exchange of %s' % k)
-
-def fetch_price():
-    for country_code, parser in PRICE_PARSERS.iteritems():
-        try:
-            obj = parser(country_code,session)
-            if not obj: continue
-            # Database insert
-            result = db_upsert(col_price,obj, 'countryCode')
-            if (result.modified_count or result.upserted_id) and cache and ENV == 'development': cache.delete(MEMCACHED_STATE_KEY)
-        except:
-            logger.exception('Exception while fetching pricing of %s' % country_code)
-
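fetch_exchanges enforces that every exchange key is alphabetically sorted ('BE->FR', never 'FR->BE'), which is what makes the netFlow sign unambiguous. A sketch of the normalization this convention implies, assuming (based on migrate_db.py further down) that positive netFlow means power flowing toward the second country code:

def normalize_exchange(code_a, code_b, net_flow_a_to_b):
    # Return (sortedCountryCodes, netFlow), signed toward the
    # alphabetically later code.
    first, second = sorted([code_a, code_b])
    key = '%s->%s' % (first, second)
    net_flow = net_flow_a_to_b if code_a == first else -net_flow_a_to_b
    return key, net_flow

# normalize_exchange('FR', 'BE', 1000.0) == ('BE->FR', -1000.0)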
-def db_upsert_forecast(col, obj, database_key):
-    now = arrow.now().datetime
-    query = {
-        database_key: obj[database_key],
-        'refTime': obj['refTime'],
-        'targetTime': obj['targetTime']
-    }
-    result = col.update_one(query, { '$set': obj }, upsert=True)
-    delta_hours = int((obj['targetTime'] - obj['refTime']).total_seconds() / 3600.0)
-    if result.modified_count:
-        col.update_one(query, { '$set': { 'modifiedAt': now } })
-        logger.info('[%s] Updated %s @ %s +%d' % (col.full_name, obj[database_key], obj['refTime'], delta_hours))
-    elif result.matched_count:
-        logger.debug('[%s] Already up to date: %s @ %s +%d' % (col.full_name, obj[database_key], obj['refTime'], delta_hours))
-    elif result.upserted_id:
-        col.update_one(query, { '$set': { 'createdAt': now } })
-        logger.info('[%s] Inserted %s @ %s +%d' % (col.full_name, obj[database_key], obj['refTime'], delta_hours))
-    else:
-        raise Exception('Unknown database command result.')
-    return result
-
-def fetch_next_forecasts(now=None, lookahead=6, cached=False):
-    if not now: now = arrow.utcnow()
-    horizon = now.floor('hour')
-    while (int(horizon.format('HH')) % weather.STEP_HORIZON) != 0:
-        horizon = horizon.replace(hours=-1)
-    # Warning: solar will not be available at horizon 0
-    # so always do at least horizon 1
-    origin = horizon.replace(hours=-1)
-    while (int(origin.format('HH')) % weather.STEP_ORIGIN) != 0:
-        origin = origin.replace(hours=-1)
-
-    objs = []
-    for i in range(lookahead):
-        # Check if wind and solar are already in the database
-        if cached:
-            results = map(lambda d: d['key'], col_gfs.find({
-                'refTime': origin.datetime,
-                'targetTime': horizon.datetime
-            }, projection={'key': 1}))
-        delta_hours = int((horizon.datetime - origin.datetime).total_seconds() / 3600.0)
-        if cached and set(results) == set(['wind', 'solar']):
-            logger.debug('[%s] Already in database: %s @ %s +%d' % (col_gfs.full_name, ('wind', 'solar'), origin.datetime, delta_hours))
-        else:
-            objs.append(weather.fetch_forecast(origin, horizon))
-        horizon = horizon.replace(hours=+weather.STEP_HORIZON)
-
-    return objs
-
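The two while loops in fetch_next_forecasts walk timestamps back one hour at a time until they hit a multiple of the GFS step. weather.STEP_HORIZON and weather.STEP_ORIGIN live in parsers/weather.py and are not part of this patch, so the values below (3 and 6, typical GFS steps) are assumptions; the arithmetic is a direct equivalent of the loops:

import arrow

STEP_HORIZON, STEP_ORIGIN = 3, 6  # assumed; see parsers/weather.py

def floor_to_step(t, step_hours):
    # Latest whole hour whose hour-of-day is divisible by step_hours
    t = t.floor('hour')
    return t.replace(hours=-(int(t.format('HH')) % step_hours))

now = arrow.get('2017-03-03T15:23:00')
horizon = floor_to_step(now, STEP_HORIZON)                      # 15:00
origin = floor_to_step(horizon.replace(hours=-1), STEP_ORIGIN)  # 12:00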
-def fetch_weather():
-    try:
-        objs = fetch_next_forecasts(cached=True)
-        for obj in objs:
-            wind = {
-                'refTime': obj['refTime'],
-                'targetTime': obj['targetTime'],
-                'data': Binary(snappy.compress(json.dumps(obj['wind']))),
-                'key': 'wind'
-            }
-            solar = {
-                'refTime': obj['refTime'],
-                'targetTime': obj['targetTime'],
-                'data': Binary(snappy.compress(json.dumps(obj['solar']))),
-                'key': 'solar'
-            }
-            db_upsert_forecast(col_gfs, wind, 'key')
-            db_upsert_forecast(col_gfs, solar, 'key')
-    except:
-        logger.exception('fetch_weather()')
-
-def push_cache():
-    try:
-        subprocess.check_call(['node', 'push_cache.js'], shell=False)
-    except:
-        logger.exception('push_cache()')
-
-def fetch_electricity():
-    # Fetch all electricity data
-    fetch_weather()
-    fetch_consumptions()
-    fetch_productions()
-    fetch_exchanges()
-    fetch_price()
-    push_cache()
-
-migrate(db, validate_production)
-push_cache()
-
-schedule.every(15).minutes.do(fetch_weather)
-schedule.every(INTERVAL_SECONDS).seconds.do(fetch_electricity)
-
-fetch_electricity()
-
-while True:
-    schedule.run_pending()
-    time.sleep(10) # Only sleep for 10 seconds before checking again
diff --git a/feeder/migrate_db.py b/feeder/migrate_db.py
deleted file mode 100644
index 7868b3817d..0000000000
--- a/feeder/migrate_db.py
+++ /dev/null
@@ -1,44 +0,0 @@
-import pymongo
-
-def migrate(db, validate_production):
-    print 'Starting data migration..'
-    # ** Migrate one collection (production) to two (production & exchanges)
-    col_production = db['production']
-    col_exchange = db['exchange']
-    col_old = db['realtime']
-    for row in col_old.find():
-        # Extract exchange
-        if 'exchange' in row:
-            exchange = row['exchange']
-            # Insert into exchange db
-            for k, v in exchange.iteritems():
-                if k == 'datetime': continue
-                sortedCountryCodes = '->'.join(sorted([k, row['countryCode']]))
-                col_exchange.insert({
-                    'datetime': row.get('datetimeExchange', row['datetime']),
-                    'sortedCountryCodes': sortedCountryCodes,
-                    'netFlow': v if sortedCountryCodes[1] == k else v * -1
-                })
-            # Delete exchange
-            del row['exchange']
-        if 'datetimeExchange' in row: del row['datetimeExchange']
-        if 'datetimeProduction' in row: del row['datetimeProduction']
-        # Copy in other collection
-        try: col_production.insert(row)
-        except pymongo.errors.DuplicateKeyError: pass
-        # Delete in old collection
-        col_old.remove({'_id': row['_id']})
-    # ** Validate production data
-    # for row in col_production.find():
-    #     try:
-    #         validate_production(row, row.get('countryCode', None))
-    #     except:
-    #         print 'Warning: row %s did not pass validation' % row['_id']
-    #         print row
-    # ** 2017-01-28 Add storage
-    for row in col_production.find({'countryCode': 'FR', 'consumption': {'$exists': True}}):
-        print 'Migrating %s' % row['datetime']
-        row['storage'] = row['consumption']
-        del row['consumption']
-        col_production.update_one({'_id': row['_id']}, {'$set': row}, upsert=False)
-    print 'Migration done.'
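fetch_weather stores each forecast as a snappy-compressed JSON blob wrapped in a BSON Binary; the web server unpacks the same encoding again (db.decompressGfs in the old server.js below). A minimal round trip, with a made-up grid payload:

import json
import snappy
from bson.binary import Binary

payload = {'lats': [55.5], 'lons': [12.5], 'values': [7.2]}  # placeholder
blob = Binary(snappy.compress(json.dumps(payload)))          # what gets stored
restored = json.loads(snappy.decompress(blob))               # what gets served
assert restored == payload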
diff --git a/feeder/package.json b/feeder/package.json
deleted file mode 100644
index 137e35fe14..0000000000
--- a/feeder/package.json
+++ /dev/null
@@ -1,21 +0,0 @@
-{
-  "name": "electricitymap-feeder",
-  "version": "0.0.1",
-  "description": "",
-  "dependencies": {
-    "async": "^2.0.1",
-    "d3": "^4.4.0",
-    "mathjs": "^3.5.1",
-    "memcached": "^2.2.2",
-    "moment": "^2.15.2",
-    "mongodb": "^2.2.9",
-    "opbeat": "^3.21.0",
-    "snappy": "^5.0.5"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/corradio/electricitymap.git"
-  },
-  "scripts": {
-  }
-}
diff --git a/feeder/parsers/__init__.py b/feeder/parsers/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/feeder/push_cache.js b/feeder/push_cache.js
deleted file mode 100644
index ce5bf3471d..0000000000
--- a/feeder/push_cache.js
+++ /dev/null
@@ -1,90 +0,0 @@
-var isProduction = process.env.ENV === 'production';
-
-console.log('Starting push_cache..');
-
-// * Opbeat (must be the first thing started)
-if (isProduction) {
-    var opbeat = require('opbeat').start({
-        appId: 'c36849e44e',
-        organizationId: '093c53b0da9d43c4976cd0737fe0f2b1',
-        secretToken: process.env['OPBEAT_SECRET']
-    });
-}
-function handleError(err) {
-    if (!err) return;
-    if (opbeat) opbeat.captureError(err);
-    console.error(err);
-}
-
-// Require
-var async = require('async');
-var d3 = require('d3');
-var moment = require('moment');
-
-// Custom modules
-global.__base = __dirname;
-var db = require('../shared/database');
-
-db.connect(function (err, _) {
-    if (err) throw (err);
-    // cache key accessors
-    var CACHE_KEY_PREFIX_HISTORY_CO2 = 'HISTORY_';
-
-    return async.series([
-        // 1 -- Set current state
-        function(callback) {
-            console.log('Querying current state..');
-            return db.queryLastValues(function (err, obj) {
-                console.log('Pushing current state..');
-                return db.setCache('cache',
-                    obj, 24 * 3600, callback);
-            })
-        },
-        // 2 -- Set state history
-        function(callback) {
-            console.log('Querying state history..');
-            var now = moment();
-            var before = moment(now).subtract(1, 'day');
-            var dates = d3.timeMinute.every(15).range(before.toDate(), now.toDate());
-
-            var queryTasks = dates.map(function(d) {
-                return function (callback) {
-                    return db.queryLastValuesBeforeDatetimeWithExpiration(d, 4 * 3600, callback)
-                };
-            });
-            // Do a series call to avoid too much work on the database
-            return async.series(queryTasks, function (err, objs) {
-                if (err) {
-                    return handleError(err);
-                }
-                // Iterate for each country
-                console.log('Pushing histories..');
-                countryCodes = d3.keys(objs[objs.length - 1].countries);
-                var insertTasks = countryCodes.map(function (countryCode) {
-                    // The datetime used is the datetime of the query
-                    // because we query the best known state of the whole grid
-                    // not just that specific country
-                    var ts = objs.map(function (d, i) {
-                        var country = d.countries[countryCode] || {};
-                        // Add a marker representing the query time
-                        country.stateDatetime = dates[i];
-                        return country;
-                    });
-                    // Push to cache
-                    return function (callback) {
-                        db.setCache(
-                            CACHE_KEY_PREFIX_HISTORY_CO2 + countryCode,
-                            ts, 24 * 3600,
-                            callback);
-                    }
-                });
-                return async.parallel(insertTasks, callback);
-            });
-        }
-    ], function(err) {
-        if (err) handleError(err);
-        // done
-        console.log('..done')
-        process.exit();
-    });
-});
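push_cache.js precomputes, per country, the last 24 hours of grid state sampled every 15 minutes and stores it under HISTORY_<countryCode>, so the /v2/history route never has to touch Mongo. The same idea re-expressed in Python for illustration only (the real implementation is the Node script above; query_state_at stands in for db.queryLastValuesBeforeDatetimeWithExpiration):

import arrow

HISTORY_PREFIX = 'HISTORY_'

def push_history(cache, query_state_at, ttl=24 * 3600):
    now = arrow.utcnow()
    # 96 samples = 24h at 15-minute resolution, oldest first
    dates = [now.replace(minutes=-15 * i) for i in range(96, 0, -1)]
    snapshots = [query_state_at(d) for d in dates]  # serial, like async.series
    for country_code in snapshots[-1]['countries']:
        ts = []
        for d, snap in zip(dates, snapshots):
            country = dict(snap['countries'].get(country_code, {}))
            # Marker for the query time (not the data time), as in the JS
            country['stateDatetime'] = d.isoformat()
            ts.append(country)
        cache.set(HISTORY_PREFIX + country_code, ts, ttl)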
diff --git a/feeder/requirements.txt b/feeder/requirements.txt
deleted file mode 100644
index c413372a95..0000000000
--- a/feeder/requirements.txt
+++ /dev/null
@@ -1,12 +0,0 @@
-arrow==0.5.0
-beautifulsoup4==4.5.1
-Cython==0.23.4
-opbeat==3.5.1
-pandas==0.16.2
-pygrib==2.0.1
-pymemcache==1.4.0
-pymongo==3.2.2
-python-snappy==0.5
-requests==2.10.0
-schedule==0.3.2
-xlrd==1.0.0
diff --git a/feeder/parsers/ENTSOE.py b/parsers/ENTSOE.py
similarity index 100%
rename from feeder/parsers/ENTSOE.py
rename to parsers/ENTSOE.py
diff --git a/feeder/parsers/FR.py b/parsers/FR.py
similarity index 100%
rename from feeder/parsers/FR.py
rename to parsers/FR.py
diff --git a/feeder/parsers/IS.py b/parsers/IS.py
similarity index 100%
rename from feeder/parsers/IS.py
rename to parsers/IS.py
diff --git a/feeder/__init__.py b/parsers/__init__.py
similarity index 100%
rename from feeder/__init__.py
rename to parsers/__init__.py
diff --git a/feeder/parsers/weather.py b/parsers/weather.py
similarity index 100%
rename from feeder/parsers/weather.py
rename to parsers/weather.py
diff --git a/production_gce.yml b/production_gce.yml
index 7c2ac71d7f..61c1512af5 100644
--- a/production_gce.yml
+++ b/production_gce.yml
@@ -1,34 +1,14 @@
 version: '2'
 services:
   web:
-    depends_on: [mongo, memcached]
-    env_file: [./mailgun.env, ./secrets.env]
     environment:
       - ENV=production
-      - MEMCACHED_HOST=memcached
-      - MONGO_URL=mongodb://mongo:27017/electricity
       - VIRTUAL_HOST=electricitymap.org,www.electricitymap.org,electricitymap.tmrow.co
     image: eu.gcr.io/tmrow-152415/electricitymap_web:production
     mem_limit: 200M
     networks: [default, infrastructure] # required to be able to com' with statsd & nginx
     volumes:
       - /home/shared/electricitymap/static/dist:/home/web/public/dist
-  feeder:
-    depends_on: [mongo]
-    env_file: [./mailgun.env, ./secrets.env]
-    environment:
-      - ENV=production
-      - MEMCACHED_HOST=memcached
-      - MONGO_URL=mongodb://mongo:27017/electricity
-    image: eu.gcr.io/tmrow-152415/electricitymap_feeder:production
-    mem_limit: 400M
-    networks: [default, infrastructure] # required to be able to com' with statsd
-    restart: unless-stopped
-  memcached:
-    image: memcached
-  mongo:
-    image: mongo
-    volumes: ['/home/shared/electricitymap/mongodata:/data/db']
 
 networks:
   infrastructure:
diff --git a/web/app/main.js b/web/app/main.js
index 976ff55434..7aa67946fa 100644
--- a/web/app/main.js
+++ b/web/app/main.js
@@ -30,7 +30,7 @@ var REFRESH_TIME_MINUTES = 5;
 
 // Global State
 var selectedCountryCode;
-var forceRemoteEndpoint = false;
+var useRemoteEndpoint = true;
 var customDate;
 var timelineEnabled = false;
 var currentMoment;
@@ -75,8 +75,8 @@ args.forEach(function(arg) {
     // Store in history state to be able to reconstruct
     replaceHistoryState(kv[0], kv[1]);
     if (kv[0] == 'remote') {
-        forceRemoteEndpoint = kv[1] == 'true';
-        replaceHistoryState('remote', forceRemoteEndpoint);
+        useRemoteEndpoint = kv[1] == 'true';
+        replaceHistoryState('remote', useRemoteEndpoint);
     } else if (kv[0] == 'datetime') {
         customDate = kv[1];
         replaceHistoryState('datetime', customDate);
@@ -97,9 +97,10 @@ var solarEnabled = showSolarOption ? (Cookies.get('solarEnabled') == 'true' || f
 var colorBlindModeEnabled = Cookies.get('colorBlindModeEnabled') == 'true' || false;
 var isLocalhost = window.location.href.indexOf('//electricitymap') == -1;
 var isEmbedded = window.top !== window.self;
-var REMOTE_ENDPOINT = '//www.electricitymap.org';
-var ENDPOINT = (document.domain != '' && document.domain.indexOf('electricitymap') == -1 && !forceRemoteEndpoint) ?
-    '' : REMOTE_ENDPOINT;
+var REMOTE_ENDPOINT = '//api.electricitymap.org';
+var LOCAL_ENDPOINT = '//localhost:9000';
+var ENDPOINT = (document.domain != '' && document.domain.indexOf('electricitymap') == -1 && !useRemoteEndpoint) ?
+    LOCAL_ENDPOINT : REMOTE_ENDPOINT;
 
 if (!isLocalhost) {
     if (typeof _opbeat !== 'undefined')
@@ -559,7 +560,7 @@ function dataLoaded(err, clientVersion, state, argSolar, argWind) {
 
     // Is there a new version?
     d3.select('#new-version')
-        .style('top', (clientVersion === bundleHash || forceRemoteEndpoint) ? undefined : 0);
+        .style('top', (clientVersion === bundleHash || useRemoteEndpoint) ? undefined : 0);
 
     currentMoment = (customDate && moment(customDate) || moment(state.datetime));
     d3.select('#current-date').text(currentMoment.format('LL'));
diff --git a/web/package.json b/web/package.json
index 42476933a6..e06ce378d1 100644
--- a/web/package.json
+++ b/web/package.json
@@ -12,12 +12,8 @@
     "i18n": "^0.8.3",
     "js-cookie": "^2.1.3",
     "json-loader": "git://github.com/webpack/json-loader.git#780f438d6e47155496d0b8ac7a9a3b35edc65e9a",
-    "mathjs": "^3.5.1",
-    "memcached": "^2.2.2",
     "moment": "^2.15.2",
-    "mongodb": "^2.2.9",
     "opbeat": "^4.11.0",
-    "snappy": "^5.0.5",
     "topojson": "^2.2.0"
   },
   "devDependencies": {
diff --git a/web/server.js b/web/server.js
index 56bffe158a..3b20341bff 100644
--- a/web/server.js
+++ b/web/server.js
@@ -17,17 +17,8 @@ var d3 = require('d3');
 var express = require('express');
 var fs = require('fs');
 var http = require('http');
-var Memcached = require('memcached');
-var moment = require('moment');
-var MongoClient = require('mongodb').MongoClient;
 var i18n = require('i18n');
-//var statsd = require('node-statsd'); // TODO: Remove
-
-// Custom modules
-global.__base = __dirname;
-var db = require('../shared/database')
-
 var app = express();
 var server = http.Server(app);
@@ -90,380 +81,17 @@ SUPPORTED_FB_LOCALES = [
 var BUNDLE_HASH = !isProduction ? 'dev' : JSON.parse(fs.readFileSync(STATIC_PATH + '/dist/manifest.json')).hash;
 
-// * Cache
-var memcachedClient = new Memcached(process.env['MEMCACHED_HOST']);
-
 // * Opbeat
-if (isProduction)
-    app.use(opbeat.middleware.express())
-function handleError(err) {
-    if (!err) return;
-    if (opbeat) opbeat.captureError(err);
-    console.error(err);
-}
-
-// * Database
-var mongoProductionCollection;
-var mongoExchangeCollection;
-var mongoPriceCollection;
-db.connect(function(err, db) {
-    if (err) throw (err);
-    console.log('Connected to database');
-    mongoExchangeCollection = db.collection('exchange');
-    mongoProductionCollection = db.collection('production');
-    mongoPriceCollection = db.collection('price');
-
-    // Start the application
-    server.listen(8000, function() {
-        console.log('Listening on *:8000');
-    });
-});
-
-// * Metrics
-// var statsdClient = new statsd.StatsD();
-// statsdClient.post = 8125;
-// statsdClient.host = process.env['STATSD_HOST'];
-// statsdClient.prefix = 'electricymap_api.';
-// statsdClient.socket.on('error', function(error) {
-//     handleError(error);
-// });
+// if (isProduction)
+//     app.use(opbeat.middleware.express())
+// function handleError(err) {
+//     if (!err) return;
+//     if (opbeat) opbeat.captureError(err);
+//     console.error(err);
+// }
-
-// * Routes
-app.get('/v1/wind', function(req, res) {
-    if (req.query.datetime) return res.status(401).send('Unauthorized. Please contact admin@tmrow.co');
-    var t0 = (new Date().getTime());
-    //statsdClient.increment('v1_wind_GET');
-    var cacheQuery = false;//req.query.datetime == null;
-    var cacheResponse = req.query.datetime == null;
-    now = req.query.datetime ? new Date(req.query.datetime) : moment.utc().toDate();
-    getParsedForecasts('wind', now, cacheQuery, function(err, obj) {
-        if (err) {
-            handleError(err);
-            res.status(500).send('Unknown server error');
-        } else if (!obj) {
-            res.status(500).send('No data');
-        } else {
-            var deltaMs = new Date().getTime() - t0;
-            obj['took'] = deltaMs + 'ms';
-//            statsdClient.timing('wind_GET', deltaMs);
-            if (cacheResponse) {
-                var beforeTargetTime = moment(obj.forecasts[0][0].header.refTime)
-                    .add(obj.forecasts[0][0].header.forecastTime, 'hours');
-                var afterTargetTime = moment(obj.forecasts[1][0].header.refTime)
-                    .add(obj.forecasts[1][0].header.forecastTime, 'hours');
-                // This cache system ignore the fact that a newer forecast,
-                // for the same target, can be fetched.
-                res.setHeader('Cache-Control', 'public');
-                // Expires at/after the upper bound (to force refresh after)
-                res.setHeader('Expires', afterTargetTime.toDate().toUTCString());
-                // Last-modified at the lower bound (to force refresh before)
-                res.setHeader('Last-Modified', beforeTargetTime.toDate().toUTCString());
-            }
-            res.json(obj);
-        }
-    });
-});
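The deleted /v1/wind handler brackets the requested time between two forecasts and turns that window into HTTP caching: Expires is set to the later target time (forcing a refresh once the window closes) and Last-Modified to the earlier one. The same derivation, sketched in Python with hypothetical inputs:

from datetime import timedelta

def forecast_cache_headers(before, after):
    # before/after: (refTime as datetime, forecastTime in hours) of the
    # two forecasts bracketing the requested instant
    lower = before[0] + timedelta(hours=before[1])
    upper = after[0] + timedelta(hours=after[1])
    fmt = '%a, %d %b %Y %H:%M:%S GMT'
    return {
        'Cache-Control': 'public',
        'Expires': upper.strftime(fmt),        # refresh after upper bound
        'Last-Modified': lower.strftime(fmt),  # revalidate before lower bound
    }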
-app.get('/v1/solar', function(req, res) {
-    if (req.query.datetime) return res.status(401).send('Unauthorized. Please contact admin@tmrow.co');
-    var t0 = (new Date().getTime());
-    //statsdClient.increment('v1_solar_GET');
-    var cacheQuery = false;//req.query.datetime == null;
-    var cacheResponse = req.query.datetime == null;
-    now = req.query.datetime ? new Date(req.query.datetime) : moment.utc().toDate();
-    getParsedForecasts('solar', now, cacheQuery, function(err, obj) {
-        if (err) {
-            handleError(err);
-            res.status(500).send('Unknown server error');
-        } else if (!obj) {
-            res.status(500).send('No data');
-        } else {
-            var deltaMs = new Date().getTime() - t0;
-            obj['took'] = deltaMs + 'ms';
-            //statsdClient.timing('solar_GET', deltaMs);
-            if (cacheResponse) {
-                var beforeTargetTime = moment(obj.forecasts[0].header.refTime)
-                    .add(obj.forecasts[0].header.forecastTime, 'hours');
-                var afterTargetTime = moment(obj.forecasts[1].header.refTime)
-                    .add(obj.forecasts[1].header.forecastTime, 'hours');
-                // This cache system ignore the fact that a newer forecast,
-                // for the same target, can be fetched.
-                res.setHeader('Cache-Control', 'public');
-                // Expires at/after the upper bound (to force refresh after)
-                res.setHeader('Expires', afterTargetTime.toDate().toUTCString());
-                // Last-modified at the lower bound (to force refresh before)
-                res.setHeader('Last-Modified', beforeTargetTime.toDate().toUTCString());
-                res.json(obj);
-            }
-        }
-    });
-});
-app.get('/v1/state', function(req, res) {
-    if (req.query.datetime) return res.status(401).send('Unauthorized. Please contact admin@tmrow.co');
-    //statsdClient.increment('v1_state_GET');
-    var t0 = new Date().getTime();
-    function returnObj(obj, cached) {
-        var deltaMs = new Date().getTime() - t0;
-        res.json({status: 'ok', data: obj, took: deltaMs + 'ms', cached: cached});
-    }
-    if (req.query.datetime) {
-        // Ignore requests in the future
-        if (moment(req.query.datetime) > moment.now())
-            returnObj({countries: {}, exchanges: {}}, false);
-        db.queryLastValuesBeforeDatetime(req.query.datetime, function (err, result) {
-            if (err) {
-                //statsdClient.increment('state_GET_ERROR');
-                handleError(err);
-                res.status(500).json({error: 'Unknown database error'});
-            } else {
-                returnObj(result, false);
-            }
-        });
-    } else {
-        return db.getCached('state',
-            function (err, data, cached) {
-                if (err) {
-                    if (opbeat)
-                        opbeat.captureError(err);
-                    console.error(err);
-                    return res.status(500)
-                        .json({error: 'Unknown database error'});
-                }
-                returnObj(data || {'countries': [], 'exchanges': []}, cached);
-            },
-            5 * 60,
-            db.queryLastValues);
-    }
-});
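db.getCached, used by the deleted /v1/state route, is a read-through cache: serve the memcached value when present, otherwise run the fallback query, store the result with a TTL, and report whether the answer was cached. A sketch of the pattern (pymemcache-based to match the feeder; the shared/database implementation itself is not part of this patch):

import json

def get_cached(cache, key, ttl, fallback_query):
    raw = cache.get(key)
    if raw is not None:
        return json.loads(raw), True   # served from cache
    data = fallback_query()
    cache.set(key, json.dumps(data), expire=ttl)
    return data, False                 # freshly computed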
-app.get('/v1/co2', function(req, res) {
-    //statsdClient.increment('v1_co2_GET');
-    var t0 = new Date().getTime();
-    var countryCode = req.query.countryCode;
-
-    function getCachedState(callback) {
-        return db.getCached('state',
-            callback,
-            5 * 60,
-            db.queryLastValues);
-    }
-
-    // TODO: Rewrite this api with two promises [geocoder, state]
-    function onCo2Computed(err, obj, cached) {
-        var countries = obj.countries;
-        if (err) {
-            //statsdClient.increment('co2_GET_ERROR');
-            handleError(err);
-            res.status(500).json({error: 'Unknown error'});
-        } else {
-            var deltaMs = new Date().getTime() - t0;
-            responseObject = {
-                status: 'ok',
-                countryCode: countryCode,
-                co2intensity: (countries[countryCode] || {}).co2intensity,
-                unit: 'gCo2eq/kWh',
-                data: countries[countryCode],
-                cached: cached
-            };
-            responseObject.took = deltaMs + 'ms';
-            res.json(responseObject);
-            //statsdClient.timing('co2_GET', deltaMs);
-        }
-    }
-
-    if ((req.query.lon && req.query.lat) || countryCode) {
-        if (!countryCode) {
-            // Geocode
-            http.get(
-                'http://maps.googleapis.com/maps/api/geocode/json?latlng=' + req.query.lat + ',' + req.query.lon,
-                function (geocoderResponse) {
-                    var body = '';
-                    geocoderResponse.on('data', function(chunk) { body += chunk; });
-                    geocoderResponse.on('end', function() {
-                        var obj = JSON.parse(body).results[0].address_components
-                            .filter(function(d) { return d.types.indexOf('country') != -1; });
-                        if (obj.length) {
-                            countryCode = obj[0].short_name;
-                            getCachedState(onCo2Computed);
-                        }
-                        else {
-                            console.error('Geocoder returned no usable results');
-                            res.status(500).json({error: 'Error while geocoding'});
-                        }
-                    });
-                }
-            ).on('error', (e) => {
-                console.error(`Error while geocoding: ${e.message}`);
-                res.status(500).json({error: 'Error while geocoding'});
-            });
-        } else {
-            getCachedState(onCo2Computed);
-        }
-    } else {
-        res.status(400).json({'error': 'Missing arguments "lon" and "lat" or "countryCode"'})
-    }
-});
-app.get('/v1/exchanges', function(req, res) {
-    if (req.query.datetime) return res.status(401).send('Unauthorized. Please contact admin@tmrow.co');
-    var countryCode = req.query.countryCode;
-    var datetime = req.query.datetime;
-    if (!countryCode) {
-        res.status(400).json({'error': 'Missing argument "countryCode"'});
-        return;
-    }
-    var maxDate = datetime ? new Date(datetime) : undefined;
-    var minDate = (moment(maxDate) || moment.utc()).subtract(24, 'hours').toDate();
-    mongoExchangeCollection.distinct('sortedCountryCodes',
-        {datetime: db.rangeQuery(minDate, maxDate)},
-        function(err, sortedCountryCodes) {
-            if (err) {
-                handleError(err);
-                res.status(500).json({error: 'Unknown database error'});
-            } else {
-                sortedCountryCodes = sortedCountryCodes.filter(function(d) {
-                    var arr = d.split('->')
-                    var from = arr[0]; var to = arr[1];
-                    return (from === countryCode || to === countryCode);
-                });
-                db.queryElements('sortedCountryCodes', sortedCountryCodes,
-                    mongoExchangeCollection, minDate, maxDate,
-                    function(err, data) {
-                        if (err) {
-                            handleError(err);
-                            res.status(500).json({error: 'Unknown database error'});
-                        } else {
-                            res.json({status: 'ok', data: data});
-                        }
-                    });
-            }
-        })
-});
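When /v1/co2 receives lat/lon instead of a countryCode, the deleted handler reverse-geocodes through the Google Maps API and keeps the short_name of the first address component typed 'country'. The same extraction in Python (requests-based, error handling trimmed, for illustration):

import requests

def country_code_from_latlon(lat, lon):
    url = ('http://maps.googleapis.com/maps/api/geocode/json?latlng=%s,%s'
           % (lat, lon))
    results = requests.get(url).json()['results']
    countries = [c for c in results[0]['address_components']
                 if 'country' in c['types']]
    if not countries:
        raise ValueError('Geocoder returned no usable results')
    return countries[0]['short_name']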
-app.get('/v1/production', function(req, res) {
-    if (req.query.datetime) return res.status(401).send('Unauthorized. Please contact admin@tmrow.co');
-    var countryCode = req.query.countryCode;
-    var datetime = req.query.datetime;
-    if (!countryCode) {
-        res.status(400).json({'error': 'Missing argument "countryCode"'});
-        return;
-    }
-    var maxDate = datetime ? new Date(datetime) : undefined;
-    var minDate = (moment(maxDate) || moment.utc()).subtract(24, 'hours').toDate();
-    mongoProductionCollection.findOne(
-        db.elementQuery('countryCode', countryCode, minDate, maxDate),
-        { sort: [['datetime', -1]] },
-        function(err, doc) {
-            if (err) {
-                handleError(err);
-                res.status(500).json({error: 'Unknown database error'});
-            } else {
-                res.json(doc);
-            }
-        })
-});
-
-app.get('/v1/price', function(req, res) {
-    if (req.query.datetime) return res.status(401).send('Unauthorized. Please contact admin@tmrow.co');
-    var countryCode = req.query.countryCode;
-    var datetime = req.query.datetime;
-    if (!countryCode) {
-        res.status(400).json({'error': 'Missing argument "countryCode"'});
-        return;
-    }
-    var maxDate = datetime ? new Date(datetime) : undefined;
-    var minDate = (moment(maxDate) || moment.utc()).subtract(24, 'hours').toDate();
-    mongoPriceCollection.findOne(
-        db.elementQuery('countryCode', countryCode, minDate, maxDate),
-        { sort: [['datetime', -1]] },
-        function(err, doc) {
-            if (err) {
-                handleError(err);
-                res.status(500).json({error: 'Unknown database error'});
-            } else {
-                res.json(doc);
-            }
-        })
-});
-
-// *** V2 ***
-function handleForecastQuery(key, req, res) {
-    var t0 = (new Date().getTime());
-    //statsdClient.increment('v1_wind_GET');
-    var cacheResponse = req.query.datetime == null;
-    if (!req.query.refTime)
-        return res.status(400).json({'error': 'Parameter `refTime` is missing'});
-    if (!req.query.targetTime)
-        return res.status(400).json({'error': 'Parameter `targetTime` is missing'});
-    db.queryGfsAt(key, req.query.refTime, req.query.targetTime, (err, obj) => {
-        if (err) {
-            handleError(err);
-            return res.status(500).send('Unknown server error');
-        } else if (!obj) {
-            return res.status(404).send('Forecast was not found');
-        } else {
-            return db.decompressGfs(obj['data'].buffer, (err, result) => {
-                if (err) {
-                    handleError(err);
-                    return res.status(500).send('Unknown server error');
-                }
-                // statsdClient.timing('wind_GET', deltaMs);
-                if (cacheResponse) {
-                    // Cache for max 1d
-                    res.setHeader('Cache-Control', 'public, max-age=86400, s-max-age=86400');
-                    // Last-modified at the lower bound (to force refresh before)
-                    res.setHeader('Last-Modified',
-                        (obj['updatedAt'] || obj['createdAt']).toUTCString());
-                }
-                var deltaMs = new Date().getTime() - t0;
-                res.json({
-                    data: result,
-                    took: deltaMs + 'ms'
-                });
-            });
-        }
-    });
-}
-app.get('/v2/gfs/:key', function(req, res) {
-    return handleForecastQuery(req.params.key, req, res);
-});
-
-app.get('/v2/co2LastDay', function(req, res) {
-    // TODO: Remove
-    res.redirect(301, '/v2/history?countryCode=' + req.query.countryCode);
-});
-app.get('/v2/history', function(req, res) {
-    var countryCode = req.query.countryCode;
-    if (!countryCode) return res.status(400).send('countryCode required');
-
-    return db.getCached('HISTORY_' + countryCode,
-        function (err, data, cached) {
-            if (err) {
-                if (opbeat)
-                    opbeat.captureError(err);
-                console.error(err);
-                res.status(500).send('Unknown database error');
-            // } else if (!data) {
-            //     res.status(500).send('No data was found');
-            } else {
-                res.json({ 'data': data, 'cached': cached })
-            }
-        });
-});
-
-// *** UNVERSIONED ***
 app.get('/health', function(req, res) {
-    //statsdClient.increment('health_GET');
-    var EXPIRATION_SECONDS = 30 * 60.0;
-    mongoProductionCollection.findOne({}, {sort: [['datetime', -1]]}, function (err, doc) {
-        if (err || !doc) {
-            console.error(err || 'No data found');
-            handleError(err);
-            res.status(500).json({error: 'Unknown database error'});
-        } else {
-            var deltaMs = new Date().getTime() - new Date(doc.datetime).getTime();
-            if (deltaMs < 0 && deltaMs > EXPIRATION_SECONDS * 1000.0)
-                res.status(500).json({error: 'Database is empty or last measurement is too old'});
-            else
-                res.json({status: 'ok'});
-        }
-    });
+    return res.json({status: 'ok'});
 });
 app.get('/clientVersion', function(req, res) {
     res.send(BUNDLE_HASH);
@@ -475,7 +103,7 @@ app.get('/', function(req, res) {
     var isSubDomain = req.get('host').indexOf('electricitymap.tmrow.co') != -1;
     if (isSubDomain && (req.headers['user-agent'] || '').indexOf('facebookexternalhit') == -1) {
         // Redirect
-        res.redirect(301, 'http://www.electricitymap.org' + req.path);
+        res.redirect(301, 'https://www.electricitymap.org' + req.path);
     } else {
         // Set locale if facebook requests it
         if (req.query.fb_locale) {
@@ -493,3 +121,11 @@ app.get('/', function(req, res) {
         });
     }
 });
+app.get('/*', function(req, res) {
+    return res.redirect(301, 'https://api.electricitymap.org' + req.path);
+});
+
+// Start the application
+server.listen(8000, function() {
+    console.log('Listening on *:8000');
+});