Commit
* Do not display token as info message
* Implement simple Report Utility to compute totals within a time period
  - AccumulationReportGenerator
  - Sample script: report-generator-example.py
Showing 11 changed files with 336 additions and 16 deletions.
@@ -0,0 +1,134 @@
import logging
from pprint import pprint
from datetime import datetime

from ..api.connection import Api
from ..api.exceptions import HttpNotFoundError, HttpClientError
from ..stream.data import StreamData
from ..utils.gid import *
from ..utils.basic import datetime_to_str

logger = logging.getLogger(__name__)

class BaseReportGenerator(object):
    _api = None
    _stream_slugs = []
    _streams = []

    def __init__(self, api):
        self._api = api
        self._clean()

    def _clean(self):
        self._stream_slugs = []
        self._streams = []

    def _add_streams(self, streams):
        # pprint.pprint(streams)
        logger.debug('Adding {} streams'.format(streams['count']))
        self._stream_slugs += [s['slug'] for s in streams['results']]
        self._streams += streams['results']

    def _fetch_streams_from_project_slug(self, slug):
        project_slug = IOTileProjectSlug(slug)
        try:
            streams = self._api.stream().get(project=str(project_slug))
            self._add_streams(streams)
        except HttpClientError as e:
            logger.warning(e)

    def _fetch_streams_from_device_slug(self, slug):
        device_slug = IOTileDeviceSlug(slug)

        try:
            streams = self._api.stream().get(device=str(device_slug))
            self._add_streams(streams)
        except HttpClientError as e:
            logger.warning(e)

    def _fetch_stream_from_slug(self, slug):
        stream_slug = IOTileStreamSlug(slug)

        try:
            stream = self._api.stream(str(stream_slug)).get()
            self._stream_slugs.append(stream['slug'])
            self._streams += [stream,]
        except HttpClientError as e:
            logger.warning(e)

    def _process_data(self, start, end=None):
        logger.error('_process_data must be implemented')
        return {}

    def compute_sum(self, sources, start, end=None):
        factory = {
            'p--': self._fetch_streams_from_project_slug,
            'd--': self._fetch_streams_from_device_slug,
            's--': self._fetch_stream_from_slug,
        }

        # Given the list of source slugs (project, device or stream), get a unified list of streams
        self._clean()
        for src in sources:
            prefix = src[0:3]
            if prefix in factory:
                factory[prefix](src)
            else:
                logger.error('Illegal source slug: {}'.format(src))

        if len(self._streams):
            logger.info('Processing {} streams'.format(len(self._streams)))
            stats = self._process_data(start, end)
        else:
            msg = 'No streams were found for these GIDs'
            logger.error(msg)
            stats = {'error': msg}

        return stats


class AccumulationReportGenerator(BaseReportGenerator):
    """
    For every stream, compute the total sum of its data,
    then compute the grand total across all streams.
    """

    def __init__(self, api):
        super(AccumulationReportGenerator, self).__init__(api)

    def _process_data(self, start, end=None):
        logger.debug('Processing Data from {0} to {1}'.format(start, end))

        if end:
            end = datetime_to_str(end)
        else:
            end = datetime_to_str(datetime.utcnow())

        start = datetime_to_str(start)
        logger.debug('--> start={0}, end={1}'.format(start, end))

        stream_stats = {
            'streams': {},
            'total': 0
        }
        # pprint(self._streams)
        for stream in self._streams:
            stream_data = StreamData(stream['slug'], self._api)
            try:
                stream_data.initialize_from_server(start=start, end=end)
            except HttpNotFoundError as e:
                logger.error(e)

            sum = 0
            for item in stream_data.data:
                sum += item['output_value']

            if sum:
                stream_stats['streams'][stream['slug']] = {
                    'sum': sum,
                    'units': stream['output_unit']['unit_short']
                }
                stream_stats['total'] += sum

        return stream_stats
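For orientation, here is a minimal usage sketch of the new report utility, based only on the class above and the example script further down. The credentials and source slugs are placeholders, not real accounts or GIDs.

# Minimal sketch: sum stream data for one project and one device over September 2017.
# The email, password and slugs are placeholders.
from datetime import datetime

from iotile_cloud.api.connection import Api
from iotile_cloud.stream.report import AccumulationReportGenerator

api = Api('https://iotile.cloud')
if api.login(email='user@example.com', password='my-password'):
    gen = AccumulationReportGenerator(api)
    stats = gen.compute_sum(sources=['p--0001', 'd--1111'],
                            start=datetime(2017, 9, 1), end=datetime(2017, 9, 30))
    # stats has the shape:
    # {'streams': {<stream slug>: {'sum': <number>, 'units': <unit_short>}}, 'total': <number>}
    api.logout()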
@@ -0,0 +1,3 @@

def datetime_to_str(dt):
    return dt.strftime('%Y-%m-%dT%H:%M:%SZ')
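As a quick illustration of the helper above (the absolute import path is inferred from the "from ..utils.basic import datetime_to_str" line in the report module):

# Illustrative only: format a naive UTC datetime the way the report generator expects.
from datetime import datetime
from iotile_cloud.utils.basic import datetime_to_str

print(datetime_to_str(datetime(2017, 9, 1)))  # -> 2017-09-01T00:00:00Z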
@@ -0,0 +1,94 @@
"""
Script to compute totals across multiple projects, devices and/or streams.
Usage:
- python report-generator-example.py -u [email protected]
  --t0 <start date> --t1 <end date>
  source [sources]
Examples:
- Generate report for September for project 1, project 2, device 0x111 and device 0x222:
  --t0 2017-09-01 --t1 2017-09-30 p--0001 p--0000-00002 d--1111 d--0000-0000-0000-2222
- Generate report for September for streams s--0000-0001--0000-0000-0000-1111--5001 and
  s--0000-0001--0000-0000-0000-1111--5002:
  --t0 2017-09-01 --t1 2017-09-30 s--0000-0001--0000-0000-0000-1111--5001 s--0000-0001--0000-0000-0000-1111--5002
"""
import sys
import argparse
import getpass
import logging
from pprint import pprint
from datetime import datetime
from dateutil.parser import parse as dt_parse

from iotile_cloud.api.connection import Api
from iotile_cloud.stream.report import AccumulationReportGenerator

PRODUCTION_DOMAIN_NAME = 'https://iotile.cloud'

logger = logging.getLogger(__name__)


if __name__ == '__main__':
    # Logger Format
    logging.basicConfig(level=logging.DEBUG,
                        format='[%(asctime)-15s] %(levelname)-6s %(message)s',
                        datefmt='%d/%b/%Y %H:%M:%S')

    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-u', '--user', dest='email', type=str, help='Email used for login')
    parser.add_argument('--t0', dest='t0', type=str, help='Start Date')
    parser.add_argument('--t1', dest='t1', type=str, help='End Date')

    parser.add_argument('sources', metavar='sources', nargs='+', type=str, help='Report source (projects, devices, streams)')

    args = parser.parse_args()
    logger.info('--------------')

    if not args.email:
        logger.error('User email is required: --user')
        sys.exit(1)

    if not args.t0:
        logger.error('Start Date is required: --t0')
        sys.exit(1)

    try:
        t0 = dt_parse(args.t0)
    except Exception as e:
        logger.error(e)
        sys.exit(1)

    if not args.t1:
        t1 = datetime.utcnow()
    else:
        try:
            t1 = dt_parse(args.t1)
        except Exception as e:
            logger.error(e)
            sys.exit(1)

    password = getpass.getpass()

    domain = PRODUCTION_DOMAIN_NAME

    logger.info('Using Server: {0}'.format(domain))
    c = Api(domain)

    ok = c.login(email=args.email, password=password)
    if ok:
        logger.info('Welcome {0}'.format(args.email))

        gen = AccumulationReportGenerator(c)
        stats = gen.compute_sum(sources=args.sources, start=t0, end=t1)

        pprint(stats)

        logger.info('Goodbye!!')
        c.logout()
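For reference, an invocation matching the usage notes in the script's docstring (the email and source slugs below are placeholders):

# Hypothetical invocation; substitute a real account email and real GIDs.
# The script prompts for the password, then pretty-prints per-stream sums and the grand total.
python report-generator-example.py -u user@example.com --t0 2017-09-01 --t1 2017-09-30 p--0001 d--1111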
File renamed without changes.
File renamed without changes.