update
This commit is contained in:
@@ -0,0 +1,108 @@
|
||||
import redis
|
||||
from redis._parsers.helpers import bool_ok
|
||||
|
||||
from ..helpers import get_protocol_version, parse_to_list
|
||||
from .commands import (
|
||||
ALTER_CMD,
|
||||
CREATE_CMD,
|
||||
CREATERULE_CMD,
|
||||
DEL_CMD,
|
||||
DELETERULE_CMD,
|
||||
GET_CMD,
|
||||
INFO_CMD,
|
||||
MGET_CMD,
|
||||
MRANGE_CMD,
|
||||
MREVRANGE_CMD,
|
||||
QUERYINDEX_CMD,
|
||||
RANGE_CMD,
|
||||
REVRANGE_CMD,
|
||||
TimeSeriesCommands,
|
||||
)
|
||||
from .info import TSInfo
|
||||
from .utils import parse_get, parse_m_get, parse_m_range, parse_range
|
||||
|
||||
|
||||
class TimeSeries(TimeSeriesCommands):
    """
    Client exposing the RedisTimeSeries module commands (prefixed with "ts").

    The client wraps an existing redis-py client (``Redis`` or
    ``RedisCluster``) and allows interacting with all of RedisTimeSeries's
    functionality.  Note that this class subclasses ``TimeSeriesCommands``
    (not ``Redis``); command execution is delegated to the wrapped client.
    """

    def __init__(self, client=None, **kwargs):
        """Create a new RedisTimeSeries client.

        Args:
            client: An initialized redis-py client (``Redis`` or
                ``RedisCluster``) used to execute the module commands.
            **kwargs: Accepted for interface compatibility; not used here.
        """
        # Set the module commands' callbacks.
        # These commands reply with a simple status under both RESP2 and
        # RESP3, so bool_ok applies unconditionally.
        self._MODULE_CALLBACKS = {
            ALTER_CMD: bool_ok,
            CREATE_CMD: bool_ok,
            CREATERULE_CMD: bool_ok,
            DELETERULE_CMD: bool_ok,
        }

        # RESP2 replies are flat arrays that need client-side parsing into
        # richer Python structures.
        _RESP2_MODULE_CALLBACKS = {
            DEL_CMD: int,
            GET_CMD: parse_get,
            INFO_CMD: TSInfo,
            MGET_CMD: parse_m_get,
            MRANGE_CMD: parse_m_range,
            MREVRANGE_CMD: parse_m_range,
            RANGE_CMD: parse_range,
            REVRANGE_CMD: parse_range,
            QUERYINDEX_CMD: parse_to_list,
        }
        # RESP3 replies are already structured; no extra parsing callbacks.
        _RESP3_MODULE_CALLBACKS = {}

        self.client = client
        self.execute_command = client.execute_command

        if get_protocol_version(self.client) in ["3", 3]:
            self._MODULE_CALLBACKS.update(_RESP3_MODULE_CALLBACKS)
        else:
            self._MODULE_CALLBACKS.update(_RESP2_MODULE_CALLBACKS)

        # Register every callback on the underlying client so replies are
        # parsed transparently.
        for k, v in self._MODULE_CALLBACKS.items():
            self.client.set_response_callback(k, v)

    def pipeline(self, transaction=True, shard_hint=None):
        """Creates a pipeline for the TimeSeries module, that can be used
        for executing only TimeSeries commands and core commands.

        Usage example:

        r = redis.Redis()
        pipe = r.ts().pipeline()
        for i in range(100):
            pipe.add("with_pipeline", i, 1.1 * i)
        pipe.execute()

        """
        # Cluster clients need the cluster-aware pipeline, mirroring the
        # wrapped client's cluster configuration.
        if isinstance(self.client, redis.RedisCluster):
            p = ClusterPipeline(
                nodes_manager=self.client.nodes_manager,
                commands_parser=self.client.commands_parser,
                startup_nodes=self.client.nodes_manager.startup_nodes,
                result_callbacks=self.client.result_callbacks,
                cluster_response_callbacks=self.client.cluster_response_callbacks,
                cluster_error_retry_attempts=self.client.cluster_error_retry_attempts,
                read_from_replicas=self.client.read_from_replicas,
                reinitialize_steps=self.client.reinitialize_steps,
                lock=self.client._lock,
            )

        else:
            p = Pipeline(
                connection_pool=self.client.connection_pool,
                response_callbacks=self._MODULE_CALLBACKS,
                transaction=transaction,
                shard_hint=shard_hint,
            )
        return p
|
||||
|
||||
|
||||
class ClusterPipeline(TimeSeriesCommands, redis.cluster.ClusterPipeline):
    """Pipeline for TimeSeries commands against a Redis Cluster client."""
|
||||
|
||||
|
||||
class Pipeline(TimeSeriesCommands, redis.client.Pipeline):
    """Pipeline for TimeSeries commands against a standalone Redis client."""
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,896 @@
|
||||
from typing import Dict, List, Optional, Tuple, Union
|
||||
|
||||
from redis.exceptions import DataError
|
||||
from redis.typing import KeyT, Number
|
||||
|
||||
# RedisTimeSeries command names, shared between the command methods that
# build requests and the client that registers response callbacks.
ADD_CMD = "TS.ADD"
ALTER_CMD = "TS.ALTER"
CREATERULE_CMD = "TS.CREATERULE"
CREATE_CMD = "TS.CREATE"
DECRBY_CMD = "TS.DECRBY"
DELETERULE_CMD = "TS.DELETERULE"
DEL_CMD = "TS.DEL"
GET_CMD = "TS.GET"
INCRBY_CMD = "TS.INCRBY"
INFO_CMD = "TS.INFO"
MADD_CMD = "TS.MADD"
MGET_CMD = "TS.MGET"
MRANGE_CMD = "TS.MRANGE"
MREVRANGE_CMD = "TS.MREVRANGE"
QUERYINDEX_CMD = "TS.QUERYINDEX"
RANGE_CMD = "TS.RANGE"
REVRANGE_CMD = "TS.REVRANGE"
|
||||
|
||||
|
||||
class TimeSeriesCommands:
|
||||
"""RedisTimeSeries Commands."""
|
||||
|
||||
def create(
    self,
    key: KeyT,
    retention_msecs: Optional[int] = None,
    uncompressed: Optional[bool] = False,
    labels: Optional[Dict[str, str]] = None,
    chunk_size: Optional[int] = None,
    duplicate_policy: Optional[str] = None,
):
    """
    Create a new time-series.

    Args:

    key:
        time-series key
    retention_msecs:
        Maximum age for samples compared to highest reported timestamp (in milliseconds).
        If None or 0 is passed then the series is not trimmed at all.
    uncompressed:
        Changes data storage from compressed (by default) to uncompressed.
    labels:
        Set of label-value pairs that represent metadata labels of the key.
    chunk_size:
        Memory size, in bytes, allocated for each data chunk.
        Must be a multiple of 8 in the range [128 .. 1048576].
    duplicate_policy:
        Policy for handling multiple samples with identical timestamps.
        Can be one of:
        - 'block': an error will occur for any out of order sample.
        - 'first': ignore the new value.
        - 'last': override with latest value.
        - 'min': only override if the value is lower than the existing value.
        - 'max': only override if the value is higher than the existing value.

    For more information: https://redis.io/commands/ts.create/
    """  # noqa
    # Clauses are appended in the order the command expects them; the
    # appended order is the final argument order sent to the server.
    params = [key]
    self._append_retention(params, retention_msecs)
    self._append_uncompressed(params, uncompressed)
    self._append_chunk_size(params, chunk_size)
    self._append_duplicate_policy(params, CREATE_CMD, duplicate_policy)
    self._append_labels(params, labels)

    return self.execute_command(CREATE_CMD, *params)
|
||||
|
||||
def alter(
    self,
    key: KeyT,
    retention_msecs: Optional[int] = None,
    labels: Optional[Dict[str, str]] = None,
    chunk_size: Optional[int] = None,
    duplicate_policy: Optional[str] = None,
):
    """
    Update the retention, chunk size, duplicate policy, and labels of an existing
    time series.

    Args:

    key:
        time-series key
    retention_msecs:
        Maximum retention period, compared to maximal existing timestamp (in milliseconds).
        If None or 0 is passed then the series is not trimmed at all.
    labels:
        Set of label-value pairs that represent metadata labels of the key.
    chunk_size:
        Memory size, in bytes, allocated for each data chunk.
        Must be a multiple of 8 in the range [128 .. 1048576].
    duplicate_policy:
        Policy for handling multiple samples with identical timestamps.
        Can be one of:
        - 'block': an error will occur for any out of order sample.
        - 'first': ignore the new value.
        - 'last': override with latest value.
        - 'min': only override if the value is lower than the existing value.
        - 'max': only override if the value is higher than the existing value.

    For more information: https://redis.io/commands/ts.alter/
    """  # noqa
    # Same clause ordering as create(), minus UNCOMPRESSED (storage format
    # cannot be altered after creation — no _append_uncompressed here).
    params = [key]
    self._append_retention(params, retention_msecs)
    self._append_chunk_size(params, chunk_size)
    self._append_duplicate_policy(params, ALTER_CMD, duplicate_policy)
    self._append_labels(params, labels)

    return self.execute_command(ALTER_CMD, *params)
|
||||
|
||||
def add(
    self,
    key: KeyT,
    timestamp: Union[int, str],
    value: Number,
    retention_msecs: Optional[int] = None,
    uncompressed: Optional[bool] = False,
    labels: Optional[Dict[str, str]] = None,
    chunk_size: Optional[int] = None,
    duplicate_policy: Optional[str] = None,
):
    """
    Append (or create and append) a new sample to a time series.

    Args:

    key:
        time-series key
    timestamp:
        Timestamp of the sample. * can be used for automatic timestamp (using the system clock).
    value:
        Numeric data value of the sample
    retention_msecs:
        Maximum retention period, compared to maximal existing timestamp (in milliseconds).
        If None or 0 is passed then the series is not trimmed at all.
    uncompressed:
        Changes data storage from compressed (by default) to uncompressed.
    labels:
        Set of label-value pairs that represent metadata labels of the key.
    chunk_size:
        Memory size, in bytes, allocated for each data chunk.
        Must be a multiple of 8 in the range [128 .. 1048576].
    duplicate_policy:
        Policy for handling multiple samples with identical timestamps.
        Can be one of:
        - 'block': an error will occur for any out of order sample.
        - 'first': ignore the new value.
        - 'last': override with latest value.
        - 'min': only override if the value is lower than the existing value.
        - 'max': only override if the value is higher than the existing value.

    For more information: https://redis.io/commands/ts.add/
    """  # noqa
    # Creation-time options are only applied if the series does not exist
    # yet; clause order mirrors create().
    params = [key, timestamp, value]
    self._append_retention(params, retention_msecs)
    self._append_uncompressed(params, uncompressed)
    self._append_chunk_size(params, chunk_size)
    self._append_duplicate_policy(params, ADD_CMD, duplicate_policy)
    self._append_labels(params, labels)

    return self.execute_command(ADD_CMD, *params)
|
||||
|
||||
def madd(self, ktv_tuples: List[Tuple[KeyT, Union[int, str], Number]]):
    """
    Append samples to one or more time series in a single call.

    Expects a list of `tuples` shaped as (`key`, `timestamp`, `value`);
    the reply is an array holding the insertion timestamp of every sample.

    For more information: https://redis.io/commands/ts.madd/
    """  # noqa
    # Flatten the (key, timestamp, value) triples into one argument list.
    flattened = [item for ktv in ktv_tuples for item in ktv]

    return self.execute_command(MADD_CMD, *flattened)
|
||||
|
||||
def incrby(
    self,
    key: KeyT,
    value: Number,
    timestamp: Optional[Union[int, str]] = None,
    retention_msecs: Optional[int] = None,
    uncompressed: Optional[bool] = False,
    labels: Optional[Dict[str, str]] = None,
    chunk_size: Optional[int] = None,
):
    """
    Increment (or create a time-series and increment) the latest sample's value.
    This command can be used as a counter or gauge that automatically gets history as a time series.

    Args:

    key:
        time-series key
    value:
        Numeric value to add to the latest sample
    timestamp:
        Timestamp of the sample. * can be used for automatic timestamp (using the system clock).
    retention_msecs:
        Maximum age for samples compared to last event time (in milliseconds).
        If None or 0 is passed then the series is not trimmed at all.
    uncompressed:
        Changes data storage from compressed (by default) to uncompressed.
    labels:
        Set of label-value pairs that represent metadata labels of the key.
    chunk_size:
        Memory size, in bytes, allocated for each data chunk.

    For more information: https://redis.io/commands/ts.incrby/
    """  # noqa
    # Unlike add(), the timestamp goes through a TIMESTAMP clause rather
    # than a positional argument.
    params = [key, value]
    self._append_timestamp(params, timestamp)
    self._append_retention(params, retention_msecs)
    self._append_uncompressed(params, uncompressed)
    self._append_chunk_size(params, chunk_size)
    self._append_labels(params, labels)

    return self.execute_command(INCRBY_CMD, *params)
|
||||
|
||||
def decrby(
    self,
    key: KeyT,
    value: Number,
    timestamp: Optional[Union[int, str]] = None,
    retention_msecs: Optional[int] = None,
    uncompressed: Optional[bool] = False,
    labels: Optional[Dict[str, str]] = None,
    chunk_size: Optional[int] = None,
):
    """
    Decrement (or create a time-series and decrement) the latest sample's value.
    This command can be used as a counter or gauge that automatically gets history as a time series.

    Args:

    key:
        time-series key
    value:
        Numeric value to subtract from the latest sample
    timestamp:
        Timestamp of the sample. * can be used for automatic timestamp (using the system clock).
    retention_msecs:
        Maximum age for samples compared to last event time (in milliseconds).
        If None or 0 is passed then the series is not trimmed at all.
    uncompressed:
        Changes data storage from compressed (by default) to uncompressed.
    labels:
        Set of label-value pairs that represent metadata labels of the key.
    chunk_size:
        Memory size, in bytes, allocated for each data chunk.

    For more information: https://redis.io/commands/ts.decrby/
    """  # noqa
    # Mirrors incrby(); only the command name differs.
    params = [key, value]
    self._append_timestamp(params, timestamp)
    self._append_retention(params, retention_msecs)
    self._append_uncompressed(params, uncompressed)
    self._append_chunk_size(params, chunk_size)
    self._append_labels(params, labels)

    return self.execute_command(DECRBY_CMD, *params)
|
||||
|
||||
def delete(self, key: KeyT, from_time: int, to_time: int):
    """
    Delete every sample of a time series whose timestamp falls within
    the inclusive range [`from_time`, `to_time`].

    Args:

    key:
        time-series key.
    from_time:
        Start timestamp for the range deletion.
    to_time:
        End timestamp for the range deletion.

    For more information: https://redis.io/commands/ts.del/
    """  # noqa
    return self.execute_command(DEL_CMD, key, from_time, to_time)
|
||||
|
||||
def createrule(
    self,
    source_key: KeyT,
    dest_key: KeyT,
    aggregation_type: str,
    bucket_size_msec: int,
    align_timestamp: Optional[int] = None,
):
    """
    Create a compaction rule from values added to `source_key` into `dest_key`.

    Args:

    source_key:
        Key name for source time series.
    dest_key:
        Key name for destination (compacted) time series.
    aggregation_type:
        Aggregation type: one of [`avg`, `sum`, `min`, `max`, `range`,
        `count`, `first`, `last`, `std.p`, `std.s`, `var.p`, `var.s`, `twa`].
    bucket_size_msec:
        Duration of each bucket, in milliseconds.
    align_timestamp:
        Assure that there is a bucket that starts at exactly
        align_timestamp and align all other buckets accordingly.

    For more information: https://redis.io/commands/ts.createrule/
    """  # noqa
    args = [source_key, dest_key]
    self._append_aggregation(args, aggregation_type, bucket_size_msec)
    # The optional alignment timestamp trails the AGGREGATION clause.
    if align_timestamp is not None:
        args.append(align_timestamp)

    return self.execute_command(CREATERULE_CMD, *args)
|
||||
|
||||
def deleterule(self, source_key: KeyT, dest_key: KeyT):
    """
    Delete the compaction rule from `source_key` to `dest_key`.

    For more information: https://redis.io/commands/ts.deleterule/
    """  # noqa
    return self.execute_command(DELETERULE_CMD, source_key, dest_key)
|
||||
|
||||
def __range_params(
    self,
    key: KeyT,
    from_time: Union[int, str],
    to_time: Union[int, str],
    count: Optional[int],
    aggregation_type: Optional[str],
    bucket_size_msec: Optional[int],
    filter_by_ts: Optional[List[int]],
    filter_by_min_value: Optional[int],
    filter_by_max_value: Optional[int],
    align: Optional[Union[int, str]],
    latest: Optional[bool],
    bucket_timestamp: Optional[str],
    empty: Optional[bool],
):
    """Build the shared argument list for TS.RANGE / TS.REVRANGE."""
    # Clause order below is the argument order sent to the server;
    # do not reorder these append calls.
    args = [key, from_time, to_time]
    self._append_latest(args, latest)
    self._append_filer_by_ts(args, filter_by_ts)
    self._append_filer_by_value(args, filter_by_min_value, filter_by_max_value)
    self._append_count(args, count)
    self._append_align(args, align)
    self._append_aggregation(args, aggregation_type, bucket_size_msec)
    self._append_bucket_timestamp(args, bucket_timestamp)
    self._append_empty(args, empty)

    return args
|
||||
|
||||
def range(
    self,
    key: KeyT,
    from_time: Union[int, str],
    to_time: Union[int, str],
    count: Optional[int] = None,
    aggregation_type: Optional[str] = None,
    bucket_size_msec: Optional[int] = 0,
    filter_by_ts: Optional[List[int]] = None,
    filter_by_min_value: Optional[int] = None,
    filter_by_max_value: Optional[int] = None,
    align: Optional[Union[int, str]] = None,
    latest: Optional[bool] = False,
    bucket_timestamp: Optional[str] = None,
    empty: Optional[bool] = False,
):
    """
    Query a range in forward direction for a specific time-series.

    Args:

    key:
        Key name for timeseries.
    from_time:
        Start timestamp for the range query. - can be used to express the minimum possible timestamp (0).
    to_time:
        End timestamp for range query, + can be used to express the maximum possible timestamp.
    count:
        Limits the number of returned samples.
    aggregation_type:
        Optional aggregation type. Can be one of [`avg`, `sum`, `min`, `max`,
        `range`, `count`, `first`, `last`, `std.p`, `std.s`, `var.p`, `var.s`, `twa`]
    bucket_size_msec:
        Time bucket for aggregation in milliseconds.
    filter_by_ts:
        List of timestamps to filter the result by specific timestamps.
    filter_by_min_value:
        Filter result by minimum value (must mention also filter_by_max_value).
    filter_by_max_value:
        Filter result by maximum value (must mention also filter_by_min_value).
    align:
        Timestamp for alignment control for aggregation.
    latest:
        Used when a time series is a compaction, reports the compacted value of the
        latest possibly partial bucket
    bucket_timestamp:
        Controls how bucket timestamps are reported. Can be one of [`-`, `low`, `+`,
        `high`, `~`, `mid`].
    empty:
        Reports aggregations for empty buckets.

    For more information: https://redis.io/commands/ts.range/
    """  # noqa
    # Argument assembly is shared with revrange(); only the command differs.
    params = self.__range_params(
        key,
        from_time,
        to_time,
        count,
        aggregation_type,
        bucket_size_msec,
        filter_by_ts,
        filter_by_min_value,
        filter_by_max_value,
        align,
        latest,
        bucket_timestamp,
        empty,
    )
    return self.execute_command(RANGE_CMD, *params)
|
||||
|
||||
def revrange(
    self,
    key: KeyT,
    from_time: Union[int, str],
    to_time: Union[int, str],
    count: Optional[int] = None,
    aggregation_type: Optional[str] = None,
    bucket_size_msec: Optional[int] = 0,
    filter_by_ts: Optional[List[int]] = None,
    filter_by_min_value: Optional[int] = None,
    filter_by_max_value: Optional[int] = None,
    align: Optional[Union[int, str]] = None,
    latest: Optional[bool] = False,
    bucket_timestamp: Optional[str] = None,
    empty: Optional[bool] = False,
):
    """
    Query a range in reverse direction for a specific time-series.

    **Note**: This command is only available since RedisTimeSeries >= v1.4

    Args:

    key:
        Key name for timeseries.
    from_time:
        Start timestamp for the range query. - can be used to express the minimum possible timestamp (0).
    to_time:
        End timestamp for range query, + can be used to express the maximum possible timestamp.
    count:
        Limits the number of returned samples.
    aggregation_type:
        Optional aggregation type. Can be one of [`avg`, `sum`, `min`, `max`,
        `range`, `count`, `first`, `last`, `std.p`, `std.s`, `var.p`, `var.s`, `twa`]
    bucket_size_msec:
        Time bucket for aggregation in milliseconds.
    filter_by_ts:
        List of timestamps to filter the result by specific timestamps.
    filter_by_min_value:
        Filter result by minimum value (must mention also filter_by_max_value).
    filter_by_max_value:
        Filter result by maximum value (must mention also filter_by_min_value).
    align:
        Timestamp for alignment control for aggregation.
    latest:
        Used when a time series is a compaction, reports the compacted value of the
        latest possibly partial bucket
    bucket_timestamp:
        Controls how bucket timestamps are reported. Can be one of [`-`, `low`, `+`,
        `high`, `~`, `mid`].
    empty:
        Reports aggregations for empty buckets.

    For more information: https://redis.io/commands/ts.revrange/
    """  # noqa
    # Argument assembly is shared with range(); only the command differs.
    params = self.__range_params(
        key,
        from_time,
        to_time,
        count,
        aggregation_type,
        bucket_size_msec,
        filter_by_ts,
        filter_by_min_value,
        filter_by_max_value,
        align,
        latest,
        bucket_timestamp,
        empty,
    )
    return self.execute_command(REVRANGE_CMD, *params)
|
||||
|
||||
def __mrange_params(
    self,
    aggregation_type: Optional[str],
    bucket_size_msec: Optional[int],
    count: Optional[int],
    filters: List[str],
    from_time: Union[int, str],
    to_time: Union[int, str],
    with_labels: Optional[bool],
    filter_by_ts: Optional[List[int]],
    filter_by_min_value: Optional[int],
    filter_by_max_value: Optional[int],
    groupby: Optional[str],
    reduce: Optional[str],
    select_labels: Optional[List[str]],
    align: Optional[Union[int, str]],
    latest: Optional[bool],
    bucket_timestamp: Optional[str],
    empty: Optional[bool],
):
    """Build the shared argument list for TS.MRANGE / TS.MREVRANGE."""
    # Clause order below is the argument order sent to the server;
    # do not reorder these append calls.
    args = [from_time, to_time]
    self._append_latest(args, latest)
    self._append_filer_by_ts(args, filter_by_ts)
    self._append_filer_by_value(args, filter_by_min_value, filter_by_max_value)
    self._append_with_labels(args, with_labels, select_labels)
    self._append_count(args, count)
    self._append_align(args, align)
    self._append_aggregation(args, aggregation_type, bucket_size_msec)
    self._append_bucket_timestamp(args, bucket_timestamp)
    self._append_empty(args, empty)
    # The FILTER clause is mandatory and must precede GROUPBY/REDUCE.
    args.append("FILTER")
    args.extend(filters)
    self._append_groupby_reduce(args, groupby, reduce)
    return args
|
||||
|
||||
def mrange(
    self,
    from_time: Union[int, str],
    to_time: Union[int, str],
    filters: List[str],
    count: Optional[int] = None,
    aggregation_type: Optional[str] = None,
    bucket_size_msec: Optional[int] = 0,
    with_labels: Optional[bool] = False,
    filter_by_ts: Optional[List[int]] = None,
    filter_by_min_value: Optional[int] = None,
    filter_by_max_value: Optional[int] = None,
    groupby: Optional[str] = None,
    reduce: Optional[str] = None,
    select_labels: Optional[List[str]] = None,
    align: Optional[Union[int, str]] = None,
    latest: Optional[bool] = False,
    bucket_timestamp: Optional[str] = None,
    empty: Optional[bool] = False,
):
    """
    Query a range across multiple time-series by filters in forward direction.

    Args:

    from_time:
        Start timestamp for the range query. `-` can be used to express the minimum possible timestamp (0).
    to_time:
        End timestamp for range query, `+` can be used to express the maximum possible timestamp.
    filters:
        Filter to match the time-series labels.
    count:
        Limits the number of returned samples.
    aggregation_type:
        Optional aggregation type. Can be one of [`avg`, `sum`, `min`, `max`,
        `range`, `count`, `first`, `last`, `std.p`, `std.s`, `var.p`, `var.s`, `twa`]
    bucket_size_msec:
        Time bucket for aggregation in milliseconds.
    with_labels:
        Include in the reply all label-value pairs representing metadata labels of the time series.
    filter_by_ts:
        List of timestamps to filter the result by specific timestamps.
    filter_by_min_value:
        Filter result by minimum value (must mention also filter_by_max_value).
    filter_by_max_value:
        Filter result by maximum value (must mention also filter_by_min_value).
    groupby:
        Grouping by fields the results (must mention also reduce).
    reduce:
        Applying reducer functions on each group. Can be one of [`avg` `sum`, `min`,
        `max`, `range`, `count`, `std.p`, `std.s`, `var.p`, `var.s`].
    select_labels:
        Include in the reply only a subset of the key-value pair labels of a series.
    align:
        Timestamp for alignment control for aggregation.
    latest:
        Used when a time series is a compaction, reports the compacted
        value of the latest possibly partial bucket
    bucket_timestamp:
        Controls how bucket timestamps are reported. Can be one of [`-`, `low`, `+`,
        `high`, `~`, `mid`].
    empty:
        Reports aggregations for empty buckets.

    For more information: https://redis.io/commands/ts.mrange/
    """  # noqa
    # NOTE: __mrange_params takes its arguments in a different order than
    # this method's signature — keep the call order below as-is.
    params = self.__mrange_params(
        aggregation_type,
        bucket_size_msec,
        count,
        filters,
        from_time,
        to_time,
        with_labels,
        filter_by_ts,
        filter_by_min_value,
        filter_by_max_value,
        groupby,
        reduce,
        select_labels,
        align,
        latest,
        bucket_timestamp,
        empty,
    )

    return self.execute_command(MRANGE_CMD, *params)
|
||||
|
||||
def mrevrange(
    self,
    from_time: Union[int, str],
    to_time: Union[int, str],
    filters: List[str],
    count: Optional[int] = None,
    aggregation_type: Optional[str] = None,
    bucket_size_msec: Optional[int] = 0,
    with_labels: Optional[bool] = False,
    filter_by_ts: Optional[List[int]] = None,
    filter_by_min_value: Optional[int] = None,
    filter_by_max_value: Optional[int] = None,
    groupby: Optional[str] = None,
    reduce: Optional[str] = None,
    select_labels: Optional[List[str]] = None,
    align: Optional[Union[int, str]] = None,
    latest: Optional[bool] = False,
    bucket_timestamp: Optional[str] = None,
    empty: Optional[bool] = False,
):
    """
    Query a range across multiple time-series by filters in reverse direction.

    Args:

    from_time:
        Start timestamp for the range query. - can be used to express the minimum possible timestamp (0).
    to_time:
        End timestamp for range query, + can be used to express the maximum possible timestamp.
    filters:
        Filter to match the time-series labels.
    count:
        Limits the number of returned samples.
    aggregation_type:
        Optional aggregation type. Can be one of [`avg`, `sum`, `min`, `max`,
        `range`, `count`, `first`, `last`, `std.p`, `std.s`, `var.p`, `var.s`, `twa`]
    bucket_size_msec:
        Time bucket for aggregation in milliseconds.
    with_labels:
        Include in the reply all label-value pairs representing metadata labels of the time series.
    filter_by_ts:
        List of timestamps to filter the result by specific timestamps.
    filter_by_min_value:
        Filter result by minimum value (must mention also filter_by_max_value).
    filter_by_max_value:
        Filter result by maximum value (must mention also filter_by_min_value).
    groupby:
        Grouping by fields the results (must mention also reduce).
    reduce:
        Applying reducer functions on each group. Can be one of [`avg` `sum`, `min`,
        `max`, `range`, `count`, `std.p`, `std.s`, `var.p`, `var.s`].
    select_labels:
        Include in the reply only a subset of the key-value pair labels of a series.
    align:
        Timestamp for alignment control for aggregation.
    latest:
        Used when a time series is a compaction, reports the compacted
        value of the latest possibly partial bucket
    bucket_timestamp:
        Controls how bucket timestamps are reported. Can be one of [`-`, `low`, `+`,
        `high`, `~`, `mid`].
    empty:
        Reports aggregations for empty buckets.

    For more information: https://redis.io/commands/ts.mrevrange/
    """  # noqa
    # NOTE: __mrange_params takes its arguments in a different order than
    # this method's signature — keep the call order below as-is.
    params = self.__mrange_params(
        aggregation_type,
        bucket_size_msec,
        count,
        filters,
        from_time,
        to_time,
        with_labels,
        filter_by_ts,
        filter_by_min_value,
        filter_by_max_value,
        groupby,
        reduce,
        select_labels,
        align,
        latest,
        bucket_timestamp,
        empty,
    )

    return self.execute_command(MREVRANGE_CMD, *params)
|
||||
|
||||
def get(self, key: KeyT, latest: Optional[bool] = False):
    """
    Get the last sample of `key`.

    When `latest` is set and the series is a compaction, the compacted
    value of the latest (possibly partial) bucket is reported.

    For more information: https://redis.io/commands/ts.get/
    """  # noqa
    args = [key]
    self._append_latest(args, latest)
    return self.execute_command(GET_CMD, *args)
|
||||
|
||||
def mget(
    self,
    filters: List[str],
    with_labels: Optional[bool] = False,
    select_labels: Optional[List[str]] = None,
    latest: Optional[bool] = False,
):
    """
    Get the last samples matching the specific `filter`.

    Args:

    filters:
        Filter to match the time-series labels.
    with_labels:
        Include in the reply all label-value pairs representing metadata
        labels of the time series.
    select_labels:
        Include in the reply only a subset of the key-value pair labels of a series.
    latest:
        Used when a time series is a compaction, reports the compacted
        value of the latest possibly partial bucket.

    For more information: https://redis.io/commands/ts.mget/
    """  # noqa
    # Optional clauses precede the mandatory FILTER clause.
    args = []
    self._append_latest(args, latest)
    self._append_with_labels(args, with_labels, select_labels)
    args.append("FILTER")
    args.extend(filters)
    return self.execute_command(MGET_CMD, *args)
|
||||
|
||||
def info(self, key: KeyT):
    """
    Get information and statistics for the time series stored at `key`.

    For more information: https://redis.io/commands/ts.info/
    """  # noqa
    return self.execute_command(INFO_CMD, key)
|
||||
|
||||
def queryindex(self, filters: List[str]):
    """
    Get all time series keys matching the `filter` list.

    For more information: https://redis.io/commands/ts.queryindex/
    """  # noqa
    return self.execute_command(QUERYINDEX_CMD, *filters)
|
||||
|
||||
@staticmethod
|
||||
def _append_uncompressed(params: List[str], uncompressed: Optional[bool]):
|
||||
"""Append UNCOMPRESSED tag to params."""
|
||||
if uncompressed:
|
||||
params.extend(["UNCOMPRESSED"])
|
||||
|
||||
@staticmethod
|
||||
def _append_with_labels(
|
||||
params: List[str],
|
||||
with_labels: Optional[bool],
|
||||
select_labels: Optional[List[str]],
|
||||
):
|
||||
"""Append labels behavior to params."""
|
||||
if with_labels and select_labels:
|
||||
raise DataError(
|
||||
"with_labels and select_labels cannot be provided together."
|
||||
)
|
||||
|
||||
if with_labels:
|
||||
params.extend(["WITHLABELS"])
|
||||
if select_labels:
|
||||
params.extend(["SELECTED_LABELS", *select_labels])
|
||||
|
||||
@staticmethod
|
||||
def _append_groupby_reduce(
|
||||
params: List[str], groupby: Optional[str], reduce: Optional[str]
|
||||
):
|
||||
"""Append GROUPBY REDUCE property to params."""
|
||||
if groupby is not None and reduce is not None:
|
||||
params.extend(["GROUPBY", groupby, "REDUCE", reduce.upper()])
|
||||
|
||||
@staticmethod
|
||||
def _append_retention(params: List[str], retention: Optional[int]):
|
||||
"""Append RETENTION property to params."""
|
||||
if retention is not None:
|
||||
params.extend(["RETENTION", retention])
|
||||
|
||||
@staticmethod
|
||||
def _append_labels(params: List[str], labels: Optional[List[str]]):
|
||||
"""Append LABELS property to params."""
|
||||
if labels:
|
||||
params.append("LABELS")
|
||||
for k, v in labels.items():
|
||||
params.extend([k, v])
|
||||
|
||||
@staticmethod
|
||||
def _append_count(params: List[str], count: Optional[int]):
|
||||
"""Append COUNT property to params."""
|
||||
if count is not None:
|
||||
params.extend(["COUNT", count])
|
||||
|
||||
@staticmethod
|
||||
def _append_timestamp(params: List[str], timestamp: Optional[int]):
|
||||
"""Append TIMESTAMP property to params."""
|
||||
if timestamp is not None:
|
||||
params.extend(["TIMESTAMP", timestamp])
|
||||
|
||||
@staticmethod
|
||||
def _append_align(params: List[str], align: Optional[Union[int, str]]):
|
||||
"""Append ALIGN property to params."""
|
||||
if align is not None:
|
||||
params.extend(["ALIGN", align])
|
||||
|
||||
@staticmethod
|
||||
def _append_aggregation(
|
||||
params: List[str],
|
||||
aggregation_type: Optional[str],
|
||||
bucket_size_msec: Optional[int],
|
||||
):
|
||||
"""Append AGGREGATION property to params."""
|
||||
if aggregation_type is not None:
|
||||
params.extend(["AGGREGATION", aggregation_type, bucket_size_msec])
|
||||
|
||||
@staticmethod
|
||||
def _append_chunk_size(params: List[str], chunk_size: Optional[int]):
|
||||
"""Append CHUNK_SIZE property to params."""
|
||||
if chunk_size is not None:
|
||||
params.extend(["CHUNK_SIZE", chunk_size])
|
||||
|
||||
@staticmethod
|
||||
def _append_duplicate_policy(
|
||||
params: List[str], command: Optional[str], duplicate_policy: Optional[str]
|
||||
):
|
||||
"""Append DUPLICATE_POLICY property to params on CREATE
|
||||
and ON_DUPLICATE on ADD.
|
||||
"""
|
||||
if duplicate_policy is not None:
|
||||
if command == "TS.ADD":
|
||||
params.extend(["ON_DUPLICATE", duplicate_policy])
|
||||
else:
|
||||
params.extend(["DUPLICATE_POLICY", duplicate_policy])
|
||||
|
||||
@staticmethod
|
||||
def _append_filer_by_ts(params: List[str], ts_list: Optional[List[int]]):
|
||||
"""Append FILTER_BY_TS property to params."""
|
||||
if ts_list is not None:
|
||||
params.extend(["FILTER_BY_TS", *ts_list])
|
||||
|
||||
@staticmethod
|
||||
def _append_filer_by_value(
|
||||
params: List[str], min_value: Optional[int], max_value: Optional[int]
|
||||
):
|
||||
"""Append FILTER_BY_VALUE property to params."""
|
||||
if min_value is not None and max_value is not None:
|
||||
params.extend(["FILTER_BY_VALUE", min_value, max_value])
|
||||
|
||||
@staticmethod
|
||||
def _append_latest(params: List[str], latest: Optional[bool]):
|
||||
"""Append LATEST property to params."""
|
||||
if latest:
|
||||
params.append("LATEST")
|
||||
|
||||
@staticmethod
|
||||
def _append_bucket_timestamp(params: List[str], bucket_timestamp: Optional[str]):
|
||||
"""Append BUCKET_TIMESTAMP property to params."""
|
||||
if bucket_timestamp is not None:
|
||||
params.extend(["BUCKETTIMESTAMP", bucket_timestamp])
|
||||
|
||||
@staticmethod
|
||||
def _append_empty(params: List[str], empty: Optional[bool]):
|
||||
"""Append EMPTY property to params."""
|
||||
if empty:
|
||||
params.append("EMPTY")
|
||||
@@ -0,0 +1,91 @@
|
||||
from ..helpers import nativestr
|
||||
from .utils import list_to_dict
|
||||
|
||||
|
||||
class TSInfo:
    """
    Hold information and statistics on the time-series.
    Can be created using ``tsinfo`` command
    https://oss.redis.com/redistimeseries/commands/#tsinfo.
    """

    # NOTE: these are class-level defaults; ``rules`` and ``labels`` are
    # mutable and shared until __init__ rebinds them on the instance.
    rules = []
    labels = []
    # NOTE(review): __init__ stores the value under ``source_key``; the
    # historical ``sourceKey`` default is kept for backward compatibility.
    sourceKey = None
    source_key = None
    chunk_count = None
    memory_usage = None
    total_samples = None
    retention_msecs = None
    # NOTE(review): __init__ stores ``last_timestamp`` / ``first_timestamp``;
    # the historical *_time_stamp names are kept for backward compatibility.
    last_time_stamp = None
    last_timestamp = None
    first_time_stamp = None
    first_timestamp = None

    max_samples_per_chunk = None
    chunk_size = None
    duplicate_policy = None

    def __init__(self, args):
        """
        Hold information and statistics on the time-series.

        The supported params that can be passed as args:

        rules:
            A list of compaction rules of the time series.
        sourceKey:
            Key name for source time series in case the current series
            is a target of a rule.
        chunkCount:
            Number of Memory Chunks used for the time series.
        memoryUsage:
            Total number of bytes allocated for the time series.
        totalSamples:
            Total number of samples in the time series.
        labels:
            A list of label-value pairs that represent the metadata
            labels of the time series.
        retentionTime:
            Retention time, in milliseconds, for the time series.
        lastTimestamp:
            Last timestamp present in the time series.
        firstTimestamp:
            First timestamp present in the time series.
        maxSamplesPerChunk:
            Deprecated.
        chunkSize:
            Amount of memory, in bytes, allocated for data.
        duplicatePolicy:
            Policy that will define handling of duplicate samples.

        Can read more about on
        https://oss.redis.com/redistimeseries/configuration/#duplicate_policy
        """
        # The raw reply alternates field-name / value; pair them up into a dict.
        response = dict(zip(map(nativestr, args[::2]), args[1::2]))
        self.rules = response.get("rules")
        self.source_key = response.get("sourceKey")
        self.chunk_count = response.get("chunkCount")
        self.memory_usage = response.get("memoryUsage")
        self.total_samples = response.get("totalSamples")
        self.labels = list_to_dict(response.get("labels"))
        self.retention_msecs = response.get("retentionTime")
        self.last_timestamp = response.get("lastTimestamp")
        self.first_timestamp = response.get("firstTimestamp")
        if "maxSamplesPerChunk" in response:
            self.max_samples_per_chunk = response["maxSamplesPerChunk"]
            # Older servers report maxSamplesPerChunk; derive chunk_size from
            # it (16 bytes per sample) for backward compatibility.
            self.chunk_size = self.max_samples_per_chunk * 16
        if "chunkSize" in response:
            self.chunk_size = response["chunkSize"]
        if "duplicatePolicy" in response:
            self.duplicate_policy = response["duplicatePolicy"]
            # Decode a bytes policy (RESP2) to str; isinstance replaces the
            # non-idiomatic ``type(x) == bytes`` check.
            if isinstance(self.duplicate_policy, bytes):
                self.duplicate_policy = self.duplicate_policy.decode()

    def get(self, item):
        """Return the attribute named *item*, or None if it does not exist."""
        try:
            return self.__getitem__(item)
        except AttributeError:
            return None

    def __getitem__(self, item):
        """Allow dict-style access: info["chunk_count"] -> info.chunk_count."""
        return getattr(self, item)
|
||||
@@ -0,0 +1,44 @@
|
||||
from ..helpers import nativestr
|
||||
|
||||
|
||||
def list_to_dict(aList):
    """Convert a list of [key, value] pairs into a dict.

    Both keys and values are normalised with ``nativestr``.  Iterates the
    items directly instead of the previous ``range(len(...))`` index loop.
    """
    return {nativestr(item[0]): nativestr(item[1]) for item in aList}
|
||||
|
||||
|
||||
def parse_range(response):
    """Parse range response. Used by TS.RANGE and TS.REVRANGE.

    Each reply row is a (timestamp, value) pair; the value is coerced to float.
    """
    return [(sample[0], float(sample[1])) for sample in response]
|
||||
|
||||
|
||||
def parse_m_range(response):
    """Parse multi range response. Used by TS.MRANGE and TS.MREVRANGE.

    Each reply entry is (key, labels, samples); the result is a list of
    one-key dicts sorted by key.
    """
    series = []
    for entry in response:
        key = nativestr(entry[0])
        series.append({key: [list_to_dict(entry[1]), parse_range(entry[2])]})
    return sorted(series, key=lambda d: list(d.keys()))
|
||||
|
||||
|
||||
def parse_get(response):
    """Parse get response. Used by TS.GET.

    Returns a (timestamp, value) tuple, or None for an empty reply.
    """
    if response:
        return int(response[0]), float(response[1])
    return None
|
||||
|
||||
|
||||
def parse_m_get(response):
    """Parse multi get response. Used by TS.MGET.

    Each reply entry is (key, labels, sample); a missing sample yields
    [labels, None, None].  The result is sorted by key.
    """
    series = []
    for entry in response:
        key = nativestr(entry[0])
        labels = list_to_dict(entry[1])
        sample = entry[2]
        if sample:
            series.append({key: [labels, int(sample[0]), float(sample[1])]})
        else:
            series.append({key: [labels, None, None]})
    return sorted(series, key=lambda d: list(d.keys()))
|
||||
Reference in New Issue
Block a user