diff --git a/.travis.yml b/.travis.yml index 24956ef..ed0a66a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,6 +3,7 @@ sudo: false python: - '2.7' - '3.4' +- '3.5' - '3.6' install: - pip install --upgrade tox-travis -r requirements-build.txt -r requirements-test.txt diff --git a/CHANGELOG.md b/CHANGELOG.md index 07bc341..5fe774b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ +### v0.6.0 (2017-08-06) + + * Improved StreamData, now supporting any filtering options + * New RawData for easy querying of the /api/v1/data/ API + ### v0.5.1 (2017-05-10) * Add Api function to refresh token diff --git a/example.py b/example.py index 20d0165..501ee68 100644 --- a/example.py +++ b/example.py @@ -4,24 +4,19 @@ import sys from iotile_cloud.api.connection import Api -from iotile_cloud.stream.data import StreamData +from iotile_cloud.stream.data import StreamData, RawData from iotile_cloud.api.exceptions import HttpNotFoundError -from logging import StreamHandler, Formatter +logging.basicConfig(level=logging.DEBUG, + format='[%(asctime)-15s] %(levelname)-6s %(message)s', + datefmt='%d/%b/%Y %H:%M:%S') logger = logging.getLogger(__name__) -FORMAT = '[%(asctime)-15s] %(levelname)-6s %(message)s' -DATE_FORMAT = '%d/%b/%Y %H:%M:%S' -formatter = Formatter(fmt=FORMAT, datefmt=DATE_FORMAT) -handler = StreamHandler() -handler.setFormatter(formatter) -logger.addHandler(handler) -logger.setLevel(logging.DEBUG) parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('-u', '--user', dest='email', type=str, help='Email used for login') -parser.add_argument('--id', dest='stream_id', type=str, help='ID of stream definition to get') +parser.add_argument('-s', '--stream', dest='stream', type=str, help='Stream ID') args = parser.parse_args() logger.info('--------------') @@ -37,9 +32,16 @@ ok = c.login(email=args.email, password=password) if ok: - # GET Data - my_organizations = c.org.get() - for org in my_organizations['results']: + """ + Example for calling a GET: 
https://iotile.cloud/api/v1/org/ + And for each returned Organization, calling: https://iotile.cloud/api/v1/org//projects/ + Other examples: + all_my_projects = c.project.get() + all_my_devices = c.device.get() + all_my_streams = c.stream.get() + """ + all_my_organizations = c.org.get() + for org in all_my_organizations['results']: logger.info('I am a member of {0}'.format(org['name'])) org_projects = c.org(org['slug']).projects.get() for proj in org_projects['results']: @@ -47,14 +49,21 @@ logger.info('------------------------------') - if args.stream_id: - stream_data = StreamData(args.stream_id, c) + """ + Example for using the StreamData class to query the last 10 data points for + a given stream. For 10 items, this would be equivalent to just calling: + stream_data = c.stream(args.stream).data.get(lastn=10) + but StreamData is useful when getting more than 1K points, where you need + to recursively fetch each page (1K at a time). + """ + if args.stream: + stream_data = StreamData(args.stream, c) try: - stream_data.initialize_from_server(lastn=100) + stream_data.initialize_from_server(lastn=10) except HttpNotFoundError as e: logger.error(e) for item in stream_data.data: - logger.info('{0}: {1}'.format(item['timestamp'], item['value'])) + logger.info('{0}: {1}'.format(item['timestamp'], item['output_value'])) logger.info('------------------------------') diff --git a/iotile_cloud/stream/data.py b/iotile_cloud/stream/data.py index d6eb0ea..68225a1 100644 --- a/iotile_cloud/stream/data.py +++ b/iotile_cloud/stream/data.py @@ -7,24 +7,20 @@ logger = logging.getLogger(__name__) -class StreamData(object): + +class BaseData(object): data = [] - stream_id = None - api = None + _api = None - def __init__(self, stream_id, api): - self.stream_id = stream_id - self.api = api + def __init__(self, api): + self._api = api - def _get_args_dict(self, page, start=None, end=None, lastn=None): + def _get_args_dict(self, page, *args, **kwargs): parts = {} + for key in 
kwargs.keys(): + parts[key] = kwargs[key] + parts['page'] = page - if start: - parts['start']='{0}'.format(start) - if end: - parts['end']='{0}'.format(end) - if lastn: - parts['lastn']='{0}'.format(lastn) return parts def _date_format(self, timestamp): @@ -35,26 +31,48 @@ def _date_format(self, timestamp): logger.error('Unable to parse timestamp (with parser): ' + str(e)) sys.exit(1) - def initialize_from_server(self, start=None, end=None, lastn=None): - logger.info('Downloading data for {0}'.format(self.stream_id)) + def _fetch_data(self, *args, **kwargs): + logger.error('Fetch Data not implemented') + return {} + + def initialize_from_server(self, *args, **kwargs): + logger.debug('Downloading data') page = 1 while page: - extra = self._get_args_dict(start=start, end=end, lastn=lastn, page=page) - logger.debug('{0} ===> Downloading: {1}'.format(page, extra)) - raw_data = self.api.stream(self.stream_id).data.get(**extra) - for item in raw_data['results']: - if not item['display_value']: - item['display_value'] = 0 - self.data.append({ - 'timestamp': self._date_format(item['timestamp']), - 'value': item['display_value'] - }) - if raw_data['next']: - logger.debug('Getting more: {0}'.format(raw_data['next'])) - page += 1 - else: - page = 0 - - logger.info('==================================') - logger.info('Downloaded a total of {0} records'.format(len(self.data))) - logger.info('==================================') + extra = self._get_args_dict(page=page, *args, **kwargs) + logger.info('{0} ===> Downloading: {1}'.format(page, extra)) + raw_data = self._fetch_data(**extra) + if 'results' in raw_data: + for item in raw_data['results']: + self.data.append(item) + if raw_data['next']: + logger.debug('Getting more: {0}'.format(raw_data['next'])) + page += 1 + else: + page = 0 + + logger.debug('==================================') + logger.debug('Downloaded a total of {0} records'.format(len(self.data))) + logger.debug('==================================') + + + + +class 
StreamData(BaseData): + _stream_id = None + + def __init__(self, stream_id, api): + super(StreamData, self).__init__(api) + self._stream_id = stream_id + + def _fetch_data(self, *args, **kwargs): + return self._api.stream(self._stream_id).data.get(**kwargs) + + +class RawData(BaseData): + + def __init__(self, api): + super(RawData, self).__init__(api) + + def _fetch_data(self, *args, **kwargs): + return self._api.data.get(**kwargs) diff --git a/tests/stream-data.py b/tests/stream-data.py index 068271a..6ddbd97 100644 --- a/tests/stream-data.py +++ b/tests/stream-data.py @@ -31,9 +31,9 @@ def _multi_page_callback(self, request, context): 'next': None, 'count': 3, 'results': [ - {'timestamp': '20170109T10:00:00', 'int_value': 10, 'display_value': '1'}, - {'timestamp': '20170109T10:00:01', 'int_value': 20, 'display_value': '2'}, - {'timestamp': '20170109T10:00:02', 'int_value': 30, 'display_value': '3'}, + {'timestamp': '20170109T10:00:00', 'value': 10, 'output_value': '1'}, + {'timestamp': '20170109T10:00:01', 'value': 20, 'output_value': '2'}, + {'timestamp': '20170109T10:00:02', 'value': 30, 'output_value': '3'}, ] } if page == '1': @@ -47,8 +47,8 @@ def test_single_page_fetch(self, m): 'next': None, 'count': 2, 'results': [ - {'timestamp': '20170109', 'int_value': 40, 'display_value': '4'}, - {'timestamp': '20170109', 'int_value': 50, 'display_value': '5'}, + {'timestamp': '20170109', 'value': 40, 'output_value': '4'}, + {'timestamp': '20170109', 'value': 50, 'output_value': '5'}, ] } m.get('http://iotile.test/api/v1/stream/s--0001/data/', text=json.dumps(payload)) diff --git a/tox.ini b/tox.ini index 1e2e97c..c235eba 100644 --- a/tox.ini +++ b/tox.ini @@ -4,7 +4,7 @@ # and then run "tox" from this directory. 
[tox] -envlist = py27, py34, py36 +envlist = py27, py34, py35, py36 [testenv] deps = diff --git a/version.py b/version.py index d694a65..fabb7ea 100644 --- a/version.py +++ b/version.py @@ -1 +1 @@ -version = '0.5.1' \ No newline at end of file +version = '0.6.0' \ No newline at end of file