Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Prepared Day-ahead prices request for use in Async Frameworks #316

Open
wants to merge 4 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
104 changes: 76 additions & 28 deletions entsoe/entsoe.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,19 +82,22 @@ def _base_request(self, params: Dict, start: pd.Timestamp,
-------
requests.Response
"""
start_str = self._datetime_to_str(start)
end_str = self._datetime_to_str(end)

base_params = {
'securityToken': self.api_key,
'periodStart': start_str,
'periodEnd': end_str
}
params.update(base_params)

logger.debug(f'Performing request to {URL} with params {params}')
response = self.session.get(url=URL, params=params,
proxies=self.proxies, timeout=self.timeout)
prepared_request = self._prepare_base_request(params=params, start=start, end=end)
return self._do_prepared_request(prepared_request)

def _do_prepared_request(self, prepared_request: requests.PreparedRequest) -> requests.Response:
"""
Parameters
----------
prepared_request : requests.PreparedRequest

Returns
-------
requests.Response
"""
logger.debug(f'Performing request to {prepared_request.url}')
response = self.session.send(request=prepared_request,
proxies=self.proxies, timeout=self.timeout)
try:
response.raise_for_status()
except requests.HTTPError as e:
Expand Down Expand Up @@ -132,7 +135,36 @@ def _base_request(self, params: Dict, start: pd.Timestamp,
if 'No matching data found' in response.text:
raise NoMatchingDataError
return response

def _prepare_base_request(self, params: Dict, start: pd.Timestamp,
                          end: pd.Timestamp) -> requests.PreparedRequest:
    """
    Build a prepared GET request for the ENTSO-E API without sending it.

    Merges the caller-supplied query parameters with the security token
    and the formatted period bounds, then returns a
    ``requests.PreparedRequest`` so it can be dispatched later by any
    HTTP client (e.g. from an async framework).

    Parameters
    ----------
    params : dict
        Query-specific parameters (e.g. documentType, in_Domain).
        The mapping is NOT modified.
    start : pd.Timestamp
    end : pd.Timestamp

    Returns
    -------
    requests.PreparedRequest
    """
    # Merge into a copy instead of mutating the caller's dict:
    # params.update(...) would leak the security token (and period
    # bounds) back into the caller's mapping as a side effect.
    full_params = dict(params)
    full_params.update({
        'securityToken': self.api_key,
        'periodStart': self._datetime_to_str(start),
        'periodEnd': self._datetime_to_str(end),
    })
    req = requests.Request(
        method='GET',
        url=URL,
        params=full_params
    )
    return req.prepare()

@staticmethod
def _datetime_to_str(dtm: pd.Timestamp) -> str:
"""
Expand Down Expand Up @@ -168,14 +200,31 @@ def query_day_ahead_prices(self, country_code: Union[Area, str],
-------
str
"""
area = lookup_area(country_code)
params = {
'documentType': 'A44',
'in_Domain': area.code,
'out_Domain': area.code
}
response = self._base_request(params=params, start=start, end=end)
prepared_request = self.prepare_query_day_ahead_prices(country_code=country_code,
start=start, end=end)
response = self._do_prepared_request(prepared_request)
return response.text

def prepare_query_day_ahead_prices(self, country_code: Union[Area, str],
                                   start: pd.Timestamp, end: pd.Timestamp) -> requests.PreparedRequest:
    """
    Build (but do not send) the day-ahead prices request.

    Resolves the area for ``country_code`` and assembles the query for
    document type A44 (price document), delegating to
    ``_prepare_base_request`` for token and period handling.

    Parameters
    ----------
    country_code : Area|str
    start : pd.Timestamp
    end : pd.Timestamp

    Returns
    -------
    requests.PreparedRequest
    """
    bidding_zone = lookup_area(country_code).code
    query_params = {
        'documentType': 'A44',
        'in_Domain': bidding_zone,
        'out_Domain': bidding_zone
    }
    return self._prepare_base_request(params=query_params, start=start, end=end)

def query_aggregated_bids(self, country_code: Union[Area, str],
process_type: str,
Expand Down Expand Up @@ -1182,14 +1231,13 @@ def query_day_ahead_prices(
start=start-pd.Timedelta(days=1),
end=end+pd.Timedelta(days=1)
)
series = parse_prices(text)[resolution]
if len(series) == 0:
raise NoMatchingDataError
series = series.tz_convert(area.tz)
series = series.truncate(before=start, after=end)
# because of the above fix we need to check again if any valid data exists after truncating
if len(series) == 0:
raise NoMatchingDataError
series = parse_prices(
xml_text=text,
tz=area.tz,
resolution=resolution,
start=start,
end=end
)
return series

@year_limited
Expand Down
26 changes: 24 additions & 2 deletions entsoe/parsers.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
import sys
import zipfile
from io import BytesIO
from typing import Union
from typing import Literal, Union
import warnings
import bs4
from bs4.builder import XMLParsedAsHTMLWarning
import pandas as pd

from .exceptions import NoMatchingDataError
from .mappings import PSRTYPE_MAPPINGS, DOCSTATUS, BSNTYPE, Area
from .series_parsers import _extract_timeseries, _resolution_to_timedelta, _parse_datetimeindex, _parse_timeseries_generic,\
_parse_timeseries_generic_whole
Expand All @@ -18,11 +19,24 @@



def parse_prices(xml_text):
def parse_prices(
xml_text: str,
tz: str,
resolution: Literal['15min', '30min', '60min'],
start: pd.Timestamp,
end: pd.Timestamp) -> pd.Series:
"""
Parse day-ahead prices.

Also performs tz conversion and truncation.

Parameters
----------
xml_text : str
tz : str
resolution : Literal['15min', '30min', '60min']
start : pd.Timestamp
end : pd.Timestamp

Returns
-------
Expand All @@ -40,6 +54,14 @@ def parse_prices(xml_text):
for freq, freq_series in series.items():
if len(freq_series) > 0:
series[freq] = pd.concat(freq_series).sort_index()
series = series[resolution]
if len(series) == 0:
raise NoMatchingDataError
series = series.tz_convert(tz)
series = series.truncate(before=start, after=end)
# because of the above fix we need to check again if any valid data exists after truncating
if len(series) == 0:
raise NoMatchingDataError
return series


Expand Down