Skip to content

Commit

Permalink
Merge pull request #16 from RWTH-EBC/15-Adding-new-QL-endpoints
Browse files Browse the repository at this point in the history
Add new QL endpoints of /v2/attr and /v2/attr/{attr_name}
  • Loading branch information
djs0109 authored Jun 18, 2024
2 parents 8bb5f95 + e50f00f commit 0e4290c
Show file tree
Hide file tree
Showing 3 changed files with 153 additions and 3 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
- add: validation for JEXL based expression ([#260](https://github.com/RWTH-EBC/FiLiP/pull/260))
- add: tutorials for multi-entity ([#260](https://github.com/RWTH-EBC/FiLiP/pull/260))
- add: add ``update_entity_relationships`` to allow relationship update ([#271](https://github.com/RWTH-EBC/FiLiP/pull/271))
- add: timeseries query with all attrs and specific attr name ([#16](https://github.com/RWTH-EBC/FiLiP/pull/16))
- add: flag to determine the deletion of registration when clearing the CB ([#267](https://github.com/RWTH-EBC/FiLiP/pull/267))
- fix: rework tutorials for pydantic v2 ([#259](https://github.com/RWTH-EBC/FiLiP/pull/259))
- fix: inconsistency of `entity_type` as required argument ([#188](https://github.com/RWTH-EBC/FiLiP/pull/188))
Expand Down
125 changes: 122 additions & 3 deletions filip/clients/ngsi_v2/quantumleap.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import time
from math import inf
from collections import deque
from itertools import count
from itertools import count,chain
from typing import Dict, List, Union, Deque, Optional
from urllib.parse import urljoin
import requests
Expand All @@ -22,7 +22,7 @@
AttributeValues, \
TimeSeries, \
TimeSeriesHeader
from filip.utils.validators import validate_http_url


logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -1097,5 +1097,124 @@ def get_entity_attr_values_by_type(self,

for new, old in zip(chunk, res):
old.extend(new)

return res

# v2/attrs
def get_entity_by_attrs(self, *,
                        entity_type: str = None,
                        from_date: str = None,
                        to_date: str = None,
                        limit: int = 10000,
                        offset: int = None
                        ) -> List[TimeSeries]:
    """
    Get list of timeseries data grouped by each existing attribute name.
    The timeseries data include all entities corresponding to each
    attribute name as well as the index and values of this attribute in
    this entity.

    Args:
        entity_type (str): Comma-separated list of entity types whose data
            are to be included in the response. Use only one (no comma)
            when required. If used to resolve ambiguity for the given
            entityId, make sure the given entityId exists for this
            entityType.
        from_date (str): The starting date and time (inclusive) from which
            the context information is queried. Must be in ISO8601 format
            (e.g., 2018-01-05T15:44:34)
        to_date (str): The final date and time (inclusive) from which the
            context information is queried. Must be in ISO8601 format
            (e.g., 2018-01-05T15:44:34).
        limit (int): Maximum number of results to be retrieved.
            Default value : 10000
        offset (int): Offset for the results.

    Returns:
        List of TimeSeries, one per (entity id, attribute name) pair.
    """
    url = urljoin(self.base_url, 'v2/attrs')
    res_q = self.__query_builder(url=url,
                                 entity_type=entity_type,
                                 from_date=from_date,
                                 to_date=to_date,
                                 limit=limit,
                                 offset=offset)
    first = res_q.popleft()

    # Materialize the first page into a list. A lazy chain iterator
    # here would be consumed by zip() in the merge loop below, and the
    # final return would silently drop the consumed TimeSeries objects.
    res = list(chain.from_iterable(
        self.transform_attr_response_model(attr)
        for attr in first.get("attrs")))

    # Merge any further pages entry-by-entry into the first page.
    # NOTE(review): this assumes every page lists the same
    # attribute/entity combinations in the same order — presumably
    # guaranteed by QuantumLeap's pagination; verify against the API.
    for page in res_q:
        flattened = chain.from_iterable(
            self.transform_attr_response_model(attr)
            for attr in page.get("attrs"))
        for new, old in zip(flattened, res):
            old.extend(new)

    return res

# v2/attrs/{attr_name}
def get_entity_by_attr_name(self, *,
                            attr_name: str,
                            entity_type: str = None,
                            from_date: str = None,
                            to_date: str = None,
                            limit: int = 10000,
                            offset: int = None
                            ) -> List[TimeSeries]:
    """
    Get list of all entities containing this attribute name, as well as
    getting the index and values of this attribute in every corresponding
    entity.

    Args:
        attr_name (str): The attribute name in interest.
        entity_type (str): Comma-separated list of entity types whose data
            are to be included in the response. Use only one (no comma)
            when required. If used to resolve ambiguity for the given
            entityId, make sure the given entityId exists for this
            entityType.
        from_date (str): The starting date and time (inclusive) from which
            the context information is queried. Must be in ISO8601 format
            (e.g., 2018-01-05T15:44:34)
        to_date (str): The final date and time (inclusive) from which the
            context information is queried. Must be in ISO8601 format
            (e.g., 2018-01-05T15:44:34).
        limit (int): Maximum number of results to be retrieved.
            Default value : 10000
        offset (int): Offset for the results.

    Returns:
        List of TimeSeries, one per entity holding the attribute.
    """
    # No leading slash, for consistency with get_entity_by_attrs: a
    # leading '/' would make urljoin discard any path prefix present
    # in base_url.
    url = urljoin(self.base_url, f'v2/attrs/{attr_name}')
    res_q = self.__query_builder(url=url,
                                 entity_type=entity_type,
                                 from_date=from_date,
                                 to_date=to_date,
                                 limit=limit,
                                 offset=offset)

    first = res_q.popleft()
    # list() guards against transform_attr_response_model returning a
    # one-shot iterator: zip() in the merge loop below would consume it
    # and the final result would be incomplete.
    res = list(self.transform_attr_response_model(first))

    # Merge remaining pages entry-by-entry; assumes stable entity
    # ordering across pages (presumably guaranteed by QuantumLeap's
    # pagination — TODO confirm).
    for page in res_q:
        for new, old in zip(self.transform_attr_response_model(page), res):
            old.extend(new)
    return res

def transform_attr_response_model(self, attr_response) -> List[TimeSeries]:
    """
    Flatten one attribute entry of a QuantumLeap ``/v2/attrs`` (or
    ``/v2/attrs/{attr_name}``) response into TimeSeries objects.

    Args:
        attr_response: A dict with keys ``attrName`` and ``types``, where
            ``types`` holds per-entity-type groups of entities carrying
            ``entityId``, ``index`` and ``values``.

    Returns:
        List of TimeSeries, one per entity. Returned as a materialized
        list (not a lazy iterator) so callers can safely iterate it
        multiple times, e.g. when merging paginated results.
    """
    attr_name = attr_response.get("attrName")
    return [
        TimeSeries(
            entityId=entity.get("entityId"),
            entityType=entity_group.get("entityType"),
            index=entity.get("index"),
            attributes=[AttributeValues(attrName=attr_name,
                                        values=entity.get("values"))],
        )
        for entity_group in attr_response.get("types")
        for entity in entity_group.get("entities")
    ]
30 changes: 30 additions & 0 deletions tests/clients/test_ngsi_v2_timeseries.py
Original file line number Diff line number Diff line change
Expand Up @@ -387,6 +387,36 @@ def test_test_query_endpoints_with_args(self) -> None:
self.assertGreater(old_records.index[0],
records.index[0])
old_records = records

def test_attr_endpoints(self) -> None:
    """
    Test get entity by attr/attr name endpoints
    Returns:
        None
    """
    header = FiwareHeader(service='filip', service_path="/static")
    with QuantumLeapClient(url=settings.QL_URL,
                           fiware_header=header) as client:
        for name in ('temperature', 'humidity', 'co2'):
            series_by_name = client.get_entity_by_attr_name(attr_name=name)
            # one timeseries per unique entity id
            self.assertEqual(len(series_by_name), 2)

            # every attribute must carry one value per index entry
            for series in series_by_name:
                for attribute in series.attributes:
                    self.assertEqual(len(attribute.values),
                                     len(series.index))

        all_series = client.get_entity_by_attrs()
        # one timeseries per (unique entity id, attribute) combination
        self.assertEqual(len(all_series), 2 * 3)
        for series in all_series:
            for attribute in series.attributes:
                self.assertEqual(len(attribute.values), len(series.index))

def tearDown(self) -> None:
"""
Expand Down

0 comments on commit 0e4290c

Please sign in to comment.