From 3dba1097eda0f0d4acf4e1ceb605fa5e73355646 Mon Sep 17 00:00:00 2001
From: David Karchmer
Date: Thu, 5 Oct 2017 17:14:17 -0700
Subject: [PATCH] New data.report utility (#9)

* Do not display token as info message

* Implement simple Report Utility to compute totals within a time period
   - AccumulationReportGenerator
   - Sample script: report-generator-example.py
---
 README.md                                     |  42 +++++-
 iotile_cloud/api/connection.py                |   4 +-
 iotile_cloud/stream/data.py                   |  14 +-
 iotile_cloud/stream/report.py                 | 134 ++++++++++++++++++
 iotile_cloud/utils/basic.py                   |   3 +
 report-generator-example.py                   |  94 ++++++++++++
 tests/{api.py => test_api.py}                 |   0
 tests/{gid.py => test_gid.py}                 |   0
 tests/test_reports.py                         |  53 +++++++
 tests/{resources.py => test_resources.py}     |   0
 tests/{stream-data.py => test_stream_data.py} |   8 +-
 11 files changed, 336 insertions(+), 16 deletions(-)
 create mode 100644 iotile_cloud/stream/report.py
 create mode 100644 iotile_cloud/utils/basic.py
 create mode 100644 report-generator-example.py
 rename tests/{api.py => test_api.py} (100%)
 rename tests/{gid.py => test_gid.py} (100%)
 create mode 100644 tests/test_reports.py
 rename tests/{resources.py => test_resources.py} (100%)
 rename tests/{stream-data.py => test_stream_data.py} (89%)

diff --git a/README.md b/README.md
index 8bf1ddf..26186ef 100644
--- a/README.md
+++ b/README.md
@@ -206,9 +206,47 @@ if ok:
     stream_data = MyStreamData(stream_id=stream_id, api=c)
     stream_data.initialize_from_server(lastn=lastn)
 
-    stream_data.analyze()
+    stream_data.analyze()
 
-    c.logout()
+    c.logout()
+
+```
+
+### User Reports
+
+The package includes a simple utility to generate accumulation reports:
+
+```
+from pprint import pprint
+from iotile_cloud.api.connection import Api
+from iotile_cloud.stream.report import AccumulationReportGenerator
+
+# Generate a report for all streams from:
+sources = [
+    'p--0000-0001',                             # Project 1
+    'd--1111',                                  # Device 0x1111
+    's--0000-0002--0000-0000-0000-2222--5001'   # Stream 5001 for device 0x2222 in project 2
+]
+c = Api('https://iotile.cloud')
+ok = c.login(email=email, password=password)
+if ok:
+    gen = AccumulationReportGenerator(c)
+    stats = gen.compute_sum(sources=sources, start=t0, end=t1)
+
+    c.logout()
+```
+
+Produces:
+
+```
+{'streams': {'s--0000-0001--0000-0000-0000-0097--5002': {'sum': 1000.0,
+                                                         'units': 'G'},
+             's--0000-0001--0000-0000-0000-00a0--5001': {'sum': 1500.0,
+                                                         'units': 'G'},
+             's--0000-0003--0000-0000-0000-1111--5001': {'sum': 2000.0,
+                                                         'units': 'G'},
+             's--0000-0002--0000-0000-0000-2222--5001': {'sum': 3000.0,
+                                                         'units': 'G'}},
+ 'total': 7500.0}
 ```
diff --git a/iotile_cloud/api/connection.py b/iotile_cloud/api/connection.py
index 5a17b19..a5a84e1 100644
--- a/iotile_cloud/api/connection.py
+++ b/iotile_cloud/api/connection.py
@@ -234,7 +234,7 @@ def login(self, password, email):
             self.token = content[self.token_type]
             self.username = content['username']
 
-            logger.info('Welcome @{0} (token: {1})'.format(self.username, self.token))
+            logger.debug('Welcome @{0}'.format(self.username))
             return True
         else:
             logger.error('Login failed: ' + str(r.status_code) + ' ' + r.content.decode())
@@ -247,7 +247,7 @@ def logout(self):
         r = requests.post(url, headers=headers)
 
         if r.status_code == 204:
-            logger.info('Goodbye @{0}'.format(self.username))
+            logger.debug('Goodbye @{0}'.format(self.username))
             self.username = None
             self.token = None
         else:
diff --git a/iotile_cloud/stream/data.py b/iotile_cloud/stream/data.py
index 87b422b..72d9748 100644
--- a/iotile_cloud/stream/data.py
+++ b/iotile_cloud/stream/data.py
@@ -9,12 +9,12 @@
 
 class BaseData(object):
-    _data = []
+    data = 
[] _api = None def __init__(self, api): self._api = api - self._data = [] + self.data = [] def _get_args_dict(self, page, *args, **kwargs): parts = {} @@ -39,14 +39,14 @@ def _fetch_data(self, *args, **kwargs): def initialize_from_server(self, *args, **kwargs): logger.debug('Downloading data') page = 1 - self._data = [] + self.data = [] while page: extra = self._get_args_dict(page=page, *args, **kwargs) - logger.info('{0} ===> Downloading: {1}'.format(page, extra)) + logger.debug('{0} ===> Downloading data: {1}'.format(page, extra)) raw_data = self._fetch_data(**extra) if 'results' in raw_data: for item in raw_data['results']: - self._data.append(item) + self.data.append(item) if raw_data['next']: logger.debug('Getting more: {0}'.format(raw_data['next'])) page += 1 @@ -54,12 +54,10 @@ def initialize_from_server(self, *args, **kwargs): page = 0 logger.debug('==================================') - logger.debug('Downloaded a total of {0} records'.format(len(self._data))) + logger.debug('Downloaded a total of {0} records'.format(len(self.data))) logger.debug('==================================') - - class StreamData(BaseData): _stream_id = None diff --git a/iotile_cloud/stream/report.py b/iotile_cloud/stream/report.py new file mode 100644 index 0000000..be394ff --- /dev/null +++ b/iotile_cloud/stream/report.py @@ -0,0 +1,134 @@ +import logging +from pprint import pprint +from datetime import datetime + +from ..api.connection import Api +from ..api.exceptions import HttpNotFoundError, HttpClientError +from ..stream.data import StreamData +from ..utils.gid import * +from ..utils.basic import datetime_to_str + +logger = logging.getLogger(__name__) + +class BaseReportGenerator(object): + _api = None + _stream_slugs = [] + _streams = [] + + def __init__(self, api): + self._api = api + self._clean() + + def _clean(self): + self._stream_slugs = [] + self._streams = [] + + def _add_streams(self, streams): + + # pprint.pprint(streams) + logger.debug('Adding {} streams'.format(streams['count'])) + self._stream_slugs += [s['slug'] for s in streams['results']] + self._streams += streams['results'] + + def _fetch_streams_from_project_slug(self, slug): + project_slug = IOTileProjectSlug(slug) + try: + streams = self._api.stream().get(project=str(project_slug)) + self._add_streams(streams) + except HttpClientError as e: + logger.warning(e) + + def _fetch_streams_from_device_slug(self, slug): + device_slug = IOTileDeviceSlug(slug) + + try: + streams = self._api.stream().get(device=str(device_slug)) + self._add_streams(streams) + except HttpClientError as e: + logger.warning(e) + + def _fetch_stream_from_slug(self, slug): + stream_slug = IOTileStreamSlug(slug) + + try: + stream = self._api.stream(str(stream_slug)).get() + self._stream_slugs.append(stream['slug']) + self._streams += [stream,] + except HttpClientError as e: + logger.warning(e) + + def _process_data(self, start, end=None): + logger.error('_process_data must be implemented') + return {} + + def compute_sum(self, sources, start, end=None): + factory = { + 'p--': self._fetch_streams_from_project_slug, + 'd--': self._fetch_streams_from_device_slug, + 's--': self._fetch_stream_from_slug, + } + + # Given the list of source slugs (project or device), get a unified list of devices + self._clean() + for src in sources: + prefix = src[0:3] + if prefix in factory: + factory[prefix](src) + else: + logger.error('Illegal source slug: {}'.format(src)) + + if len(self._streams): + logger.info('Processing {} streams'.format(len(self._streams))) + stats = 
self._process_data(start, end) + else: + msg = 'No streams were found for these GIDs' + logger.error(msg) + stats = { 'error': msg } + + return stats + + +class AccumulationReportGenerator(BaseReportGenerator): + """ + For every stream, compute the total sum of its data + Compute grand total across all streams + """ + + def __init__(self, api): + super(AccumulationReportGenerator, self).__init__(api) + + def _process_data(self, start, end=None): + logger.debug('Processing Data from {0} to {1}'.format(start, end)) + + if end: + end = datetime_to_str(end) + else: + end = datetime_to_str(datetime.utcnow()) + + start = datetime_to_str(start) + logger.debug('--> start={0}, end={1}'.format(start, end)) + + stream_stats = { + 'streams': {}, + 'total': 0 + } + # pprint(self._streams) + for stream in self._streams: + stream_data = StreamData(stream['slug'], self._api) + try: + stream_data.initialize_from_server(start=start, end=end) + except HttpNotFoundError as e: + logger.error(e) + + sum = 0 + for item in stream_data.data: + sum += item['output_value'] + + if sum: + stream_stats['streams'][stream['slug']] = { + 'sum': sum, + 'units': stream['output_unit']['unit_short'] + } + stream_stats['total'] += sum + + return stream_stats diff --git a/iotile_cloud/utils/basic.py b/iotile_cloud/utils/basic.py new file mode 100644 index 0000000..fbeda20 --- /dev/null +++ b/iotile_cloud/utils/basic.py @@ -0,0 +1,3 @@ + +def datetime_to_str(dt): + return dt.strftime('%Y-%m-%dT%H:%M:%SZ') \ No newline at end of file diff --git a/report-generator-example.py b/report-generator-example.py new file mode 100644 index 0000000..80ca4a1 --- /dev/null +++ b/report-generator-example.py @@ -0,0 +1,94 @@ +""" +Script to compute totals across multiple projects, devices and/or streams. 

+
+Usage:
+- python report-generator-example.py -u user@test.com
+       --t0 <start date> --t1 <end date>
+       source [sources]
+
+Examples:
+- Generate a report for September for project 1, project 2, device 0x1111 and device 0x2222:
+
+  --t0 2017-09-01 --t1 2017-09-30 p--0001 p--0000-0002 d--1111 d--0000-0000-0000-2222
+
+- Generate a report for September for streams s--0000-0001--0000-0000-0000-1111--5001 and
+  s--0000-0001--0000-0000-0000-1111--5002:
+
+  --t0 2017-09-01 --t1 2017-09-30 s--0000-0001--0000-0000-0000-1111--5001 s--0000-0001--0000-0000-0000-1111--5002
+
+
+"""
+import sys
+import argparse
+import getpass
+import logging
+from pprint import pprint
+from datetime import datetime
+from dateutil.parser import parse as dt_parse
+
+from iotile_cloud.api.connection import Api
+from iotile_cloud.stream.report import AccumulationReportGenerator
+
+PRODUCTION_DOMAIN_NAME = 'https://iotile.cloud'
+
+logger = logging.getLogger(__name__)
+
+
+if __name__ == '__main__':
+    # Logger Format
+    logging.basicConfig(level=logging.DEBUG,
+                        format='[%(asctime)-15s] %(levelname)-6s %(message)s',
+                        datefmt='%d/%b/%Y %H:%M:%S')
+
+    parser = argparse.ArgumentParser(description=__doc__)
+    parser.add_argument('-u', '--user', dest='email', type=str, help='Email used for login')
+    parser.add_argument('--t0', dest='t0', type=str, help='Start Date')
+    parser.add_argument('--t1', dest='t1', type=str, help='End Date')
+
+    parser.add_argument('sources', metavar='sources', nargs='+', type=str, help='Report source (projects, devices, streams)')
+
+    args = parser.parse_args()
+    logger.info('--------------')
+
+    if not args.email:
+        logger.error('User email is required: --user')
+        sys.exit(1)
+
+    if not args.t0:
+        logger.error('Start Date is required: --t0')
+        sys.exit(1)
+
+    try:
+        t0 = dt_parse(args.t0)
+    except Exception as e:
+        logger.error(e)
+        sys.exit(1)
+
+    if not args.t1:
+        t1 = datetime.utcnow()
+    else:
+        try:
+            t1 = dt_parse(args.t1)
+        except Exception as e:
+            logger.error(e)
+            sys.exit(1)
+
+    password = getpass.getpass()
+
+    domain = PRODUCTION_DOMAIN_NAME
+
+    logger.info('Using Server: {0}'.format(domain))
+    c = Api(domain)
+
+    ok = c.login(email=args.email, password=password)
+    if ok:
+        logger.info('Welcome {0}'.format(args.email))
+
+        gen = AccumulationReportGenerator(c)
+        stats = gen.compute_sum(sources=args.sources, start=t0, end=t1)
+
+        pprint(stats)
+
+        logger.info('Goodbye!!')
+        c.logout()
diff --git a/tests/api.py b/tests/test_api.py
similarity index 100%
rename from tests/api.py
rename to tests/test_api.py
diff --git a/tests/gid.py b/tests/test_gid.py
similarity index 100%
rename from tests/gid.py
rename to tests/test_gid.py
diff --git a/tests/test_reports.py b/tests/test_reports.py
new file mode 100644
index 0000000..1cbb902
--- /dev/null
+++ b/tests/test_reports.py
@@ -0,0 +1,53 @@
+import unittest2 as unittest
+import json
+import re
+import mock
+import requests
+import requests_mock
+from dateutil.parser import parse as dt_parse
+
+from iotile_cloud.utils.gid import *
+from iotile_cloud.stream.report import *
+
+
+class ReportGenerationTestCase(unittest.TestCase):
+    _payload1 = {
+        'count': 2,
+        'results': [
+            {'slug': 's--0000-0001--0000-0000-0000-0002--5001'},
+            {'slug': 's--0000-0001--0000-0000-0000-0002--5002'},
+        ]
+    }
+
+    @requests_mock.Mocker()
+    def test_source_factories_project(self, m):
+        api = Api(domain='http://iotile.test')
+        m.get('http://iotile.test/api/v1/stream/?project=p--0000-0001', text=json.dumps(self._payload1))
+
+        rg = BaseReportGenerator(api)
+        
rg._fetch_streams_from_project_slug('p--0001')
+        self.assertEqual(len(rg._streams), 2)
+
+    @requests_mock.Mocker()
+    def test_source_factories_device(self, m):
+        api = Api(domain='http://iotile.test')
+
+        m.get('http://iotile.test/api/v1/stream/?device=d--0000-0000-0000-0002', text=json.dumps(self._payload1))
+
+        rg = BaseReportGenerator(api)
+        rg._fetch_streams_from_device_slug('d--0002')
+        self.assertEqual(len(rg._streams), 2)
+
+    @requests_mock.Mocker()
+    def test_source_factories_stream(self, m):
+        api = Api(domain='http://iotile.test')
+        payload = {
+            'slug': 's--0000-0001--0000-0000-0000-0002--5001'
+        }
+
+        m.get('http://iotile.test/api/v1/stream/s--0000-0001--0000-0000-0000-0002--5001/', text=json.dumps(payload))
+
+        rg = BaseReportGenerator(api)
+        rg._fetch_stream_from_slug('s--0000-0001--0000-0000-0000-0002--5001')
+        self.assertEqual(len(rg._streams), 1)
+        self.assertEqual(rg._streams[0]['slug'], 's--0000-0001--0000-0000-0000-0002--5001')
diff --git a/tests/resources.py b/tests/test_resources.py
similarity index 100%
rename from tests/resources.py
rename to tests/test_resources.py
diff --git a/tests/stream-data.py b/tests/test_stream_data.py
similarity index 89%
rename from tests/stream-data.py
rename to tests/test_stream_data.py
index 849b983..b49f2f6 100644
--- a/tests/stream-data.py
+++ b/tests/test_stream_data.py
@@ -53,17 +53,17 @@ def test_single_page_fetch(self, m):
         }
         m.get('http://iotile.test/api/v1/stream/s--0001/data/', text=json.dumps(payload))
 
-        self.assertEqual(len(self.stream_data._data), 0)
+        self.assertEqual(len(self.stream_data.data), 0)
         self.stream_data.initialize_from_server(lastn=3)
-        self.assertEqual(len(self.stream_data._data), 2)
+        self.assertEqual(len(self.stream_data.data), 2)
 
     @requests_mock.Mocker()
     def test_multi_page_fetch(self, m):
         m.get('http://iotile.test/api/v1/stream/s--0001/data/', text=self._multi_page_callback)
 
-        self.assertEqual(len(self.stream_data._data), 0)
+        self.assertEqual(len(self.stream_data.data), 0)
         self.stream_data.initialize_from_server(lastn=6)
-        self.assertEqual(len(self.stream_data._data), 6)
+        self.assertEqual(len(self.stream_data.data), 6)
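
The new tests above exercise the stream-lookup helpers but not the accumulation itself. Below is a minimal sketch of an end-to-end check of `AccumulationReportGenerator.compute_sum`, written in the same unittest2/requests_mock style as `tests/test_reports.py`. It is not part of this patch: the test class and method names are made up, and the mocked payload shapes (a stream detail carrying `output_unit`, a single data page whose results carry `output_value`) are assumptions based only on what `report.py` reads from the responses.

```
import json

import requests_mock
import unittest2 as unittest
from dateutil.parser import parse as dt_parse

from iotile_cloud.api.connection import Api
from iotile_cloud.stream.report import AccumulationReportGenerator


class AccumulationReportSumTestCase(unittest.TestCase):

    @requests_mock.Mocker()
    def test_compute_sum_for_single_stream(self, m):
        api = Api(domain='http://iotile.test')
        slug = 's--0000-0001--0000-0000-0000-0002--5001'

        # Stream detail: _process_data() reads output_unit['unit_short']
        stream_payload = {
            'slug': slug,
            'output_unit': {'unit_short': 'G'}
        }
        # Single data page: 'next': None stops pagination in BaseData
        data_payload = {
            'count': 2,
            'next': None,
            'results': [
                {'output_value': 40.0},
                {'output_value': 2.5},
            ]
        }

        m.get('http://iotile.test/api/v1/stream/{0}/'.format(slug),
              text=json.dumps(stream_payload))
        m.get('http://iotile.test/api/v1/stream/{0}/data/'.format(slug),
              text=json.dumps(data_payload))

        gen = AccumulationReportGenerator(api)
        stats = gen.compute_sum(sources=[slug],
                                start=dt_parse('2017-09-01'),
                                end=dt_parse('2017-09-30'))

        self.assertEqual(stats['total'], 42.5)
        self.assertEqual(stats['streams'][slug]['sum'], 42.5)
        self.assertEqual(stats['streams'][slug]['units'], 'G')
```

Because `BaseData.initialize_from_server` stops paginating as soon as `next` is null, a single mocked page is enough to drive the whole accumulation path in this sketch.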