Skip to content

Commit

Permalink
Task/generalise entsoe (electricitymaps#102)
Browse files Browse the repository at this point in the history
* Crash for missing datetime

* Initial commit. Fixes electricitymaps#99 and Fixes electricitymaps#95

* Fixes electricitymaps#93

* Remove verbose

* Made an ENTSOE library. Portugal switched to ENTSOE.

* Fix parsing and display bugs

* Added sessions. Updated README

* Remove NL
  • Loading branch information
corradio authored Oct 22, 2016
1 parent 5519cc0 commit 56330e9
Show file tree
Hide file tree
Showing 24 changed files with 201 additions and 131 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ Each country has a GHG mass flow that depends on neighboring countries. In order
- Lithuania: [energinet.dk](http://www.energinet.dk/EN/El/Sider/Det-nordiske-elsystem.aspx)
- Norway: [energinet.dk](http://www.energinet.dk/EN/El/Sider/Det-nordiske-elsystem.aspx)
- Poland: [ENTSOE](https://transparency.entsoe.eu/content/static_content/Static%20content/web%20api/Guide.html)
- Portugal: [REN](http://www.centrodeinformacao.ren.pt/EN/InformacaoExploracao/Pages/EstatisticaDiaria.aspx)
- Portugal: [ENTSOE](https://transparency.entsoe.eu/content/static_content/Static%20content/web%20api/Guide.html)
- Romania: [Transelectrica](http://www.transelectrica.ro/en/web/tel/home)
- Spain: [REE](https://demanda.ree.es/generacion_acumulada.html)
- Sweden: [energinet.dk](http://www.energinet.dk/EN/El/Sider/Det-nordiske-elsystem.aspx)
Expand Down
3 changes: 2 additions & 1 deletion api/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
FROM node:4.5.0
WORKDIR /home
EXPOSE 8000
RUN npm install -g yarn
ADD package.json /home/package.json
RUN npm install
RUN yarn
ADD . /home
CMD node server.js
HEALTHCHECK CMD curl --fail http://localhost:8000/ || exit 1
Expand Down
2 changes: 1 addition & 1 deletion api/static/app/countrytable.js
Original file line number Diff line number Diff line change
Expand Up @@ -288,7 +288,7 @@ CountryTable.prototype.data = function(arg) {
.transition()
.attr('x', that.LABEL_MAX_WIDTH + (that._displayByEmissions ? that.co2Scale(0) : that.powerScale(0)))
.style('display', function (d) {
return d.mode != 'unknown' && d.production === undefined ? 'block' : 'none';
return d.mode != 'unknown' && (d.production === undefined || d.production === null) ? 'block' : 'none';
});

// Construct exchanges
Expand Down
5 changes: 4 additions & 1 deletion docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,10 @@ services:
feeder:
build: feeder
depends_on: [mongo]
environment: [ENV=development, 'MONGO_URL=mongodb://mongo:27017/electricity']
environment:
- 'ENTSOE_TOKEN=${ENTSOE_TOKEN}'
- ENV=development
- 'MONGO_URL=mongodb://mongo:27017/electricity'
volumes:
- './api/data:/home/data'
- './feeder/feeder.py:/home/feeder.py'
Expand Down
10 changes: 7 additions & 3 deletions feeder/feeder.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
import glob
import pymongo
import logging, os, schedule, time
import requests

from parsers.solar import fetch_solar
from parsers.wind import fetch_wind
Expand Down Expand Up @@ -45,15 +46,18 @@ def import_country(country_code):
db = client['electricity']
col = db['realtime']

session = requests.session()

def fetch_countries():
for parser in parsers:
try:
with statsd.StatsdTimer('fetch_one_country'):
obj = parser()
obj = parser(session)
if not 'datetime' in obj:
raise Exception('datetime was not returned from %s' % parser)
if arrow.get(obj['datetime']) > arrow.get(arrow.now()):
raise Exception('Data can''t be in the future')
if arrow.get(obj['datetime']) > arrow.now():
print obj['datetime'], arrow.now()
raise Exception("Data from %s can't be in the future" % parser)
logging.info('INSERT %s' % obj)
col.insert_one(obj)
except:
Expand Down
8 changes: 5 additions & 3 deletions feeder/parsers/AT.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,9 @@
COUNTRY_CODE = 'AT'
TIME_ZONE = 'Europe/Vienna'

def fetch_AT():
def fetch_AT(session=None):

r = session or requests.session()

now = arrow.now(TIME_ZONE)

Expand All @@ -23,7 +25,7 @@ def fetch_AT():
# Fetch production
url = 'https://www.apg.at/transparency/WebMethods/ChartsEtc.aspx/GetChartData'
payload = {"PID":"AGPT","DateString":"%s000000" % now.format('YYYYMMDD'),"Resolution":"15M","Language":"en","AdditionalFilter":"B19|B16|B01|B04|B05|B06|B09|B10|B11|B12|B15|B17|B20"}
obj = requests.post(url, json=payload).json()['d']['ResponseData'][1]
obj = r.post(url, json=payload).json()['d']['ResponseData'][1]
times = map(lambda d: arrow.get(d['DateLocalString'] + ' ' + d['TimeLocalFromString'], "MM/DD/YYYY HH:mm").replace(tzinfo=dateutil.tz.gettz(TIME_ZONE)).replace(minutes=+15),
obj['Times'])
# Fetch values of first item and determine end time
Expand Down Expand Up @@ -51,7 +53,7 @@ def fetch_AT():
# Get exchanges
url = 'https://www.apg.at/transparency/WebMethods/ChartsEtc.aspx/GetMapData'
payload = {"PID":"CBPF","DateString":"%s000000" % now.format('YYYYMMDD'),"Resolution":"15M","Language":"en","AdditionalFilter": None}
obj = requests.post(url, json=payload).json()['d']['ResponseData']
obj = r.post(url, json=payload).json()['d']['ResponseData']
times = map(lambda d: arrow.get(d['DateLocalString'] + ' ' + d['TimeLocalFromString'], "MM/DD/YYYY HH:mm").replace(tzinfo=dateutil.tz.gettz(TIME_ZONE)).replace(minutes=+15),
obj['Times'])
i = np.where(np.array(times) <= data['datetime'])[0][-1]
Expand Down
5 changes: 3 additions & 2 deletions feeder/parsers/CZ.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,8 @@
COUNTRY_CODE = 'CZ'
TIME_ZONE = 'Europe/Prague'

def fetch_CZ():
def fetch_CZ(session=None):
r = session or requests.session()
now = arrow.now(TIME_ZONE)
url = ('http://www.ceps.cz/_layouts/15/Ceps/_Pages/GraphData.aspx?mode=txt&' +
'from=%s%%2012:00:00%%20AM&' % now.format('M/D/YYYY') +
Expand Down Expand Up @@ -39,7 +40,7 @@ def fetch_CZ():
'from=%s%%2012:00:00%%20AM&' % now.format('M/D/YYYY') +
'to=%s%%2011:59:59%%20PM&' % now.replace(days=+1).format('M/D/YYYY') +
'hasinterval=False&sol=8&lang=ENG&agr=HR&fnc=AVG&ver=RT&para1=all&')
data = pd.read_csv(StringIO(requests.get(url).text), sep=';', header=2).iloc[-1]
data = pd.read_csv(StringIO(r.get(url).text), sep=';', header=2).iloc[-1]

obj['exchange'] = {
'PL': data['PSE Actual [MW]'],
Expand Down
4 changes: 2 additions & 2 deletions feeder/parsers/DE.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@

COUNTRY_CODE = 'DE'

def fetch_DE():
r = requests.session()
def fetch_DE(session=None):
r = session or requests.session()
formatted_date = arrow.now(tz='Europe/Berlin').format('DD.MM.YYYY')
url = 'https://www.agora-energiewende.de/en/topics/?type=371842&tx_agoragraphs_agoragraphs%5Breferer%5D=https%3A%2F%2Fwww.agora-energiewende.de%2Fen%2Ftopics%2F-agothem-%2FProdukt%2Fprodukt%2F76%2FAgorameter%2F&tx_agoragraphs_agoragraphs%5Baction%5D=renderPowerGeneration&tx_agoragraphs_agoragraphs%5Bcontroller%5D=Graph&tx_agoragraphs_agoragraphs%5BstartDate%5D={}&tx_agoragraphs_agoragraphs%5BendDate%5D={}'.format(formatted_date, formatted_date)
response = r.get(url)
Expand Down
5 changes: 3 additions & 2 deletions feeder/parsers/DK.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,8 @@

COUNTRY_CODE = 'DK'

def fetch_DK():
def fetch_DK(session=None):
r = session or requests.session()
url = 'http://energinet.dk/_layouts/FlashProxy.asmx'
headers = {
'Content-Type': 'text/xml; charset=utf-8',
Expand All @@ -22,7 +23,7 @@ def fetch_DK():
</soap:Body>
</soap:Envelope>"""

response = requests.post(url, data=body, headers=headers)
response = r.post(url, data=body, headers=headers)
root = ET.fromstring(response.content)
data = root[0][0][0][0][0][0].attrib

Expand Down
4 changes: 2 additions & 2 deletions feeder/parsers/EE.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,10 @@

COUNTRY_CODE = 'EE'

def fetch_EE():
def fetch_EE(session=None):
url = 'http://driftsdata.statnett.no/restapi/ProductionConsumption/GetLatestDetailedOverview'

data = requests.get(url).json()
data = (session or requests).get(url).json()
countries = map(lambda x: x['value'], data['Headers'])
i = countries.index(COUNTRY_CODE)

Expand Down
141 changes: 141 additions & 0 deletions feeder/parsers/ENTSOE.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,141 @@
from bs4 import BeautifulSoup
import arrow, os, re, requests

# Base URL of the ENTSO-E transparency platform RESTful API.
ENTSOE_ENDPOINT = 'https://transparency.entsoe.eu/api'

# Mapping of ENTSO-E 'psrType' production-type codes to human-readable names.
ENTSOE_PARAMETER_DESC = {
    'B01': 'Biomass',
    'B02': 'Fossil Brown coal/Lignite',
    'B03': 'Fossil Coal-derived gas',
    'B04': 'Fossil Gas',
    'B05': 'Fossil Hard coal',
    'B06': 'Fossil Oil',
    'B07': 'Fossil Oil shale',
    'B08': 'Fossil Peat',
    'B09': 'Geothermal',
    'B10': 'Hydro Pumped Storage',
    'B11': 'Hydro Run-of-river and poundage',
    'B12': 'Hydro Water Reservoir',
    'B13': 'Marine',
    'B14': 'Nuclear',
    'B15': 'Other renewable',
    'B16': 'Solar',
    'B17': 'Waste',
    'B18': 'Wind Offshore',
    'B19': 'Wind Onshore',
    'B20': 'Other',
}
# Reverse lookup: human-readable name -> psrType code.
# Uses .items() rather than the Python-2-only .iteritems() so the module
# also runs under Python 3 (behavior is identical on Python 2).
ENTSOE_PARAMETER_BY_DESC = {v: k for k, v in ENTSOE_PARAMETER_DESC.items()}

def query(psr_type, in_domain, session):
    """Query ENTSO-E actual generation per type (document A75) for one psrType.

    Returns the raw XML response text on success, or None when the HTTP
    request fails. Requires the ENTSOE_TOKEN environment variable
    (raises KeyError when it is missing).
    """
    now = arrow.utcnow()
    # Request a 48-hour window centred on now; the parser later picks the
    # most recent realised point from whatever the API returns.
    query_params = {
        'psrType': psr_type,
        'documentType': 'A75',
        'processType': 'A16',
        'in_Domain': in_domain,
        'periodStart': now.replace(hours=-24).format('YYYYMMDDHH00'),
        'periodEnd': now.replace(hours=+24).format('YYYYMMDDHH00'),
        'securityToken': os.environ['ENTSOE_TOKEN']
    }
    resp = session.get(ENTSOE_ENDPOINT, params=query_params)
    if not resp.ok:
        return None
    return resp.text

def datetime_from_position(start, position, resolution):
    """Compute the timestamp of one point of an ENTSO-E time series.

    start -- arrow timestamp at which the series period begins
    position -- point index within the period (1-based, per ENTSO-E)
    resolution -- ISO-8601 duration string, e.g. 'PT15M', 'PT60M' or 'PT1H'

    Raises NotImplementedError for any resolution that is not expressed
    in minutes or hours.
    """
    m = re.search(r'PT(\d+)([MH])', resolution)
    if m:
        digits = int(m.group(1))
        scale = m.group(2)
        if scale == 'M':
            return start.replace(minutes=position * digits)
        # Generalisation: some ENTSO-E domains report hourly resolutions
        # as PT1H rather than PT60M.
        if scale == 'H':
            return start.replace(hours=position * digits)
    raise NotImplementedError('Could not recognise resolution %s' % resolution)

def parse(xml_text):
    """Extract the most recent measurement from an ENTSO-E XML document.

    Returns a (quantity, datetime) tuple for the last point of the first
    time series, or None when there is no document to parse.
    """
    if not xml_text:
        return None
    document = BeautifulSoup(xml_text, 'html.parser')
    # The first time series carries the realised (not forecast) values.
    series = document.find_all('timeseries')[0]
    resolution = series.find_all('resolution')[0].contents[0]
    series_start = arrow.get(series.find_all('start')[0].contents[0])
    # The last point of the series is the newest measurement.
    newest_point = series.find_all('point')[-1]
    quantity = float(newest_point.find_all('quantity')[0].contents[0])
    position = int(newest_point.find_all('position')[0].contents[0])
    return quantity, datetime_from_position(series_start, position, resolution)

def get_biomass(values):
    """Total biomass-like production (biomass + peat + waste), or None
    when none of those categories was reported."""
    keys = ('Biomass', 'Fossil Peat', 'Waste')
    if any(key in values for key in keys):
        return sum(values.get(key, 0) for key in keys)

def get_coal(values):
    """Total coal production (lignite + hard coal), or None when neither
    category was reported."""
    keys = ('Fossil Brown coal/Lignite', 'Fossil Hard coal')
    if any(key in values for key in keys):
        return sum(values.get(key, 0) for key in keys)

def get_gas(values):
    """Total gas production (coal-derived gas + natural gas), or None
    when neither category was reported."""
    keys = ('Fossil Coal-derived gas', 'Fossil Gas')
    if any(key in values for key in keys):
        return sum(values.get(key, 0) for key in keys)

def get_hydro(values):
    """Total hydro production, or None when no hydro category was reported.

    Pumped storage is clipped at zero: negative values mean the plant is
    consuming (pumping), which must not reduce the production total.
    """
    keys = ('Hydro Pumped Storage',
            'Hydro Run-of-river and poundage',
            'Hydro Water Reservoir')
    if any(key in values for key in keys):
        pumped = max(values.get('Hydro Pumped Storage', 0), 0)
        run_of_river = values.get('Hydro Run-of-river and poundage', 0)
        reservoir = values.get('Hydro Water Reservoir', 0)
        return pumped + run_of_river + reservoir

def get_oil(values):
    """Total oil production (oil + oil shale), or None when neither
    category was reported."""
    keys = ('Fossil Oil', 'Fossil Oil shale')
    if any(key in values for key in keys):
        return sum(values.get(key, 0) for key in keys)

def get_wind(values):
    """Total wind production (onshore + offshore), or None when neither
    category was reported."""
    keys = ('Wind Onshore', 'Wind Offshore')
    if any(key in values for key in keys):
        return sum(values.get(key, 0) for key in keys)

def get_unknown(values):
    """Total production from categories without a dedicated bucket
    (geothermal, marine, other renewable, other), or None when none of
    them was reported."""
    keys = ('Geothermal', 'Marine', 'Other renewable', 'Other')
    if any(key in values for key in keys):
        return sum(values.get(key, 0) for key in keys)

def fetch_ENTSOE(in_domain, country_code, session=None):
    """Fetch the latest production breakdown for one area from ENTSO-E.

    in_domain -- ENTSO-E area EIC code (the 'in_Domain' API parameter)
    country_code -- two-letter code stored in the returned object
    session -- optional requests session reused across HTTP calls

    Returns a dict with 'countryCode', 'datetime' and a 'production'
    mapping by fuel type. Raises Exception when the per-type measurements
    refer to different instants, and ValueError when any production value
    is negative.
    """
    if not session:
        session = requests.session()
    # Query each production type separately; types with no data are skipped.
    output_pairs = {}
    for k in ENTSOE_PARAMETER_DESC.keys():
        parsed = parse(query(k, in_domain, session))
        if parsed:
            output_pairs[k] = parsed
    # All points must refer to the same measurement instant.
    dates = set(map(lambda x: x[1], output_pairs.values()))
    if not len(dates) == 1:
        raise Exception('Measurements have been taken at different times: %s' % dates)

    # .items() instead of the Python-2-only .iteritems() so this module
    # also runs under Python 3 (identical behavior on Python 2).
    values = {ENTSOE_PARAMETER_DESC[k]: v[0] for k, v in output_pairs.items()}

    data = {
        'countryCode': country_code,
        'datetime': list(dates)[0].datetime,
        'production': {
            'biomass': values.get('Biomass', None),
            'coal': get_coal(values),
            'gas': get_gas(values),
            'hydro': get_hydro(values),
            'nuclear': values.get('Nuclear', None),
            'oil': get_oil(values),
            'solar': values.get('Solar', None),
            'wind': get_wind(values),
            'unknown': get_unknown(values)
        }
    }

    # Sanity check: production can never be negative.
    for k, v in data['production'].items():
        if v is None:
            continue
        if v < 0:
            raise ValueError('key %s has negative value %s' % (k, v))

    return data
5 changes: 2 additions & 3 deletions feeder/parsers/ES.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,8 @@

COUNTRY_CODE = 'ES'

r = requests.session()

def fetch_ES():
def fetch_ES(session=None):
r = session or requests.session()
headers = {
'Content-Type': 'text/xml; charset=utf-8',
'SOAPAction': '',
Expand Down
4 changes: 2 additions & 2 deletions feeder/parsers/FI.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,9 @@

COUNTRY_CODE = 'FI'

def fetch_FI():
def fetch_FI(session=None):
url = 'http://driftsdata.statnett.no/restapi/ProductionConsumption/GetLatestDetailedOverview'
data = requests.get(url).json()
data = (session or requests).get(url).json()
countries = map(lambda x: x['value'], data['Headers'])
i = countries.index(COUNTRY_CODE)

Expand Down
4 changes: 2 additions & 2 deletions feeder/parsers/FR.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,8 @@
'Pompage': 'hydro',
}

def fetch_FR():
r = requests.session()
def fetch_FR(session=None):
r = session or requests.session()
formatted_date = arrow.now(tz='Europe/Paris').format('DD/MM/YYYY')
url = 'http://www.rte-france.com/getEco2MixXml.php?type=mix&&dateDeb={}&dateFin={}&mode=NORM'.format(formatted_date, formatted_date)
response = r.get(url)
Expand Down
4 changes: 2 additions & 2 deletions feeder/parsers/GB.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,10 @@

COUNTRY_CODE = 'GB'

def fetch_GB():
def fetch_GB(session=None):
url = 'http://www.bmreports.com/bsp/additional/soapfunctions.php?element=generationbyfueltypetable'

response = requests.get(url)
response = (session or requests).get(url)
root = ET.fromstring(response.content)
data = root[0]

Expand Down
2 changes: 1 addition & 1 deletion feeder/parsers/HU.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
COUNTRY_CODE = 'HU'
TIME_ZONE = 'Europe/Budapest'

def fetch_HU():
def fetch_HU(session=None):

now = arrow.now(TIME_ZONE)

Expand Down
4 changes: 2 additions & 2 deletions feeder/parsers/LT.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,10 @@

COUNTRY_CODE = 'LT'

def fetch_LT():
def fetch_LT(session=None):
url = 'http://driftsdata.statnett.no/restapi/ProductionConsumption/GetLatestDetailedOverview'

data = requests.get(url).json()
data = (session or requests).get(url).json()
countries = map(lambda x: x['value'], data['Headers'])
i = countries.index(COUNTRY_CODE)

Expand Down
4 changes: 2 additions & 2 deletions feeder/parsers/LV.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,10 @@

COUNTRY_CODE = 'LV'

def fetch_LV():
def fetch_LV(session=None):
url = 'http://driftsdata.statnett.no/restapi/ProductionConsumption/GetLatestDetailedOverview'

data = requests.get(url).json()
data = (session or requests).get(url).json()
countries = map(lambda x: x['value'], data['Headers'])
i = countries.index(COUNTRY_CODE)

Expand Down
4 changes: 2 additions & 2 deletions feeder/parsers/NO.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,10 @@

COUNTRY_CODE = 'NO'

def fetch_NO():
def fetch_NO(session=None):
url = 'http://driftsdata.statnett.no/restapi/ProductionConsumption/GetLatestDetailedOverview'

data = requests.get(url).json()
data = (session or requests).get(url).json()
countries = map(lambda x: x['value'], data['Headers'])
i = countries.index(COUNTRY_CODE)

Expand Down
Loading

0 comments on commit 56330e9

Please sign in to comment.