Skip to content

Commit

Permalink
Merge pull request #131 from lcmcninch/get-pages
Browse files Browse the repository at this point in the history
Implement a means of getting more than 1000 data points
  • Loading branch information
brentru authored Mar 10, 2022
2 parents 92093d4 + b533482 commit 06df42d
Show file tree
Hide file tree
Showing 3 changed files with 71 additions and 10 deletions.
64 changes: 56 additions & 8 deletions Adafruit_IO/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,13 +23,18 @@
import json
import platform
import pkg_resources
import re
from urllib.parse import urlparse
from urllib.parse import parse_qs
# import logging

import requests

from .errors import RequestError, ThrottlingError
from .model import Data, Feed, Group, Dashboard, Block, Layout

DEFAULT_PAGE_LIMIT = 100

# set outgoing version, pulled from setup.py
version = pkg_resources.require("Adafruit_IO")[0].version
default_headers = {
Expand Down Expand Up @@ -61,6 +66,9 @@ def __init__(self, username, key, proxies=None, base_url='https://io.adafruit.co
# constructing the path.
self.base_url = base_url.rstrip('/')

# Store the last response of a get or post
self._last_response = None

@staticmethod
def to_red(data):
"""Hex color feed to red channel.
Expand Down Expand Up @@ -112,10 +120,12 @@ def _handle_error(response):
def _compose_url(self, path):
return '{0}/api/{1}/{2}/{3}'.format(self.base_url, 'v2', self.username, path)

def _get(self, path, params=None):
    """Issue an authenticated GET request against the Adafruit IO API.
    :param string path: API path, relative to the user's base URL.
    :param dict params: Optional query-string parameters (e.g. paging limits).
    :return: Decoded JSON response body.
    """
    url = self._compose_url(path)
    headers = self._headers({'X-AIO-Key': self.key})
    response = requests.get(url,
                            headers=headers,
                            proxies=self.proxies,
                            params=params)
    # Keep the raw response around so pagination helpers can inspect headers.
    self._last_response = response
    self._handle_error(response)
    return response.json()

Expand All @@ -125,6 +135,7 @@ def _post(self, path, data):
'Content-Type': 'application/json'}),
proxies=self.proxies,
data=json.dumps(data))
self._last_response = response
self._handle_error(response)
return response.json()

Expand All @@ -133,6 +144,7 @@ def _delete(self, path):
headers=self._headers({'X-AIO-Key': self.key,
'Content-Type': 'application/json'}),
proxies=self.proxies)
self._last_response = response
self._handle_error(response)

# Data functionality.
Expand Down Expand Up @@ -242,17 +254,53 @@ def receive_previous(self, feed):
path = "feeds/{0}/data/previous".format(feed)
return Data.from_dict(self._get(path))

def data(self, feed, data_id=None, max_results=DEFAULT_PAGE_LIMIT):
    """Retrieve data from a feed. If data_id is not specified then all the data
    for the feed will be returned in an array.
    :param string feed: Name/Key/ID of Adafruit IO feed.
    :param string data_id: ID of the piece of data to retrieve.
    :param int max_results: The maximum number of results to return. To
                            return all data, set to None.
    """
    # Handle the single-datum case first: pagination does not apply, and
    # checking it early avoids a needless feed-details request when the
    # caller also passed max_results=None.
    if data_id:
        path = "feeds/{0}/data/{1}".format(feed, data_id)
        return Data.from_dict(self._get(path))

    if max_results is None:
        # Ask the API for the total record count so the loop below
        # keeps paging until the whole feed has been fetched.
        res = self._get('feeds/{0}/details'.format(feed))
        max_results = res['details']['data']['count']

    params = {'limit': max_results} if max_results else None
    data = []
    path = "feeds/{0}/data".format(feed)
    while len(data) < max_results:
        data.extend(list(map(Data.from_dict, self._get(path,
                                                       params=params))))
        nlink = self.get_next_link()
        if not nlink:
            break
        # Parse the next-page link for its query parameters (start time,
        # pagination cursor, etc.) and reuse them on the next request.
        params = parse_qs(urlparse(nlink).query)
        if max_results:
            # Shrink the limit so we never fetch more than requested.
            params['limit'] = max_results - len(data)
    return data

def get_next_link(self):
    """Parse the `next` page URL in the pagination Link header.
    This is necessary because of a bug in the API's implementation of the
    link header. If that bug is fixed, the link would be accessible by
    response.links['next']['url'] and this method would be broken.
    :return: The url for the next page of data, or None when there is no
        stored response, no Link header, or no next page.
    :rtype: str
    """
    if not self._last_response:
        return None
    # Some responses (e.g. the final page, or non-paginated endpoints)
    # carry no Link header at all; .get() avoids a KeyError there.
    link_header = self._last_response.headers.get('link', '')
    res = re.search(r'rel="next", <(.+?)>', link_header)
    if not res:
        return None
    return res.group(1)

def create_data(self, feed, data):
"""Create a new row of data in the specified feed.
Expand Down
13 changes: 13 additions & 0 deletions docs/data.rst
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,19 @@ You can get all of the data for a feed by using the ``data(feed)`` method. The r
for d in data:
print('Data value: {0}'.format(d.value))
By default, the maximum number of data points returned is 1000. This limit can be changed by using the max_results parameter.

.. code-block:: python
# Get less than the default number of data points
data = aio.data('Test', max_results=100)
# Get more than the default number of data points
data = aio.data('Test', max_results=2000)
# Get all of the points
data = aio.data('Test', max_results=None)
You can also get a specific value by ID by using the ``data(feed, data_id)`` method. This will return a single piece of feed data with the provided data ID if it exists in the feed. The returned object will be an instance of the Data class.


Expand Down
4 changes: 2 additions & 2 deletions tests/test_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ def ensure_block_deleted(self, client, dashboard, block):

def empty_feed(self, client, feed):
    """Delete every datum in *feed* without deleting the feed itself."""
    # max_results=None pages through the whole feed so nothing is missed.
    for entry in client.data(feed, max_results=None):
        client.delete(feed, entry.id)

Expand Down Expand Up @@ -406,4 +406,4 @@ def test_layout_update_layout(self):


if __name__ == "__main__":
unittest.main()
unittest.main()

0 comments on commit 06df42d

Please sign in to comment.