venv added, updated
This commit is contained in:
@@ -0,0 +1 @@
|
||||
"""Utils package."""
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,101 @@
|
||||
"""Utils to get right Date parsing function."""
|
||||
import datetime
|
||||
from sys import version_info
|
||||
import threading
|
||||
from datetime import timezone as tz
|
||||
|
||||
from dateutil import parser
|
||||
|
||||
date_helper = None
|
||||
|
||||
lock_ = threading.Lock()
|
||||
|
||||
|
||||
class DateHelper:
    """
    DateHelper to groups different implementations of date operations.

    If you would like to serialize the query results to custom timezone, you can use following code:

    .. code-block:: python

        from influxdb_client.client.util import date_utils
        from influxdb_client.client.util.date_utils import DateHelper
        import dateutil.parser
        from dateutil import tz

        def parse_date(date_string: str):
            return dateutil.parser.parse(date_string).astimezone(tz.gettz('ETC/GMT+2'))

        date_utils.date_helper = DateHelper()
        date_utils.date_helper.parse_date = parse_date
    """

    def __init__(self, timezone: datetime.tzinfo = tz.utc) -> None:
        """
        Initialize defaults.

        :param timezone: Default timezone used for serialization "datetime" without "tzinfo".
                         Default value is "UTC".
        """
        self.timezone = timezone

    def parse_date(self, date_string: str):
        """
        Parse string into Date or Timestamp.

        :return: Returns a :class:`datetime.datetime` object or compliant implementation
                 like :class:`class 'pandas._libs.tslibs.timestamps.Timestamp`
        """
        # Intentionally a no-op: get_date_helper() replaces this method with the
        # fastest parser available at runtime (ciso8601 / fromisoformat / dateutil).
        pass

    def to_nanoseconds(self, delta):
        """
        Get number of nanoseconds in timedelta.

        Solution comes from v1 client. Thx.
        https://github.com/influxdata/influxdb-python/pull/811

        :param delta: a :class:`datetime.timedelta` (or compatible) value
        :return: total number of nanoseconds as ``int``
        """
        nanoseconds_in_days = delta.days * 86400 * 10 ** 9
        nanoseconds_in_seconds = delta.seconds * 10 ** 9
        nanoseconds_in_micros = delta.microseconds * 10 ** 3

        return nanoseconds_in_days + nanoseconds_in_seconds + nanoseconds_in_micros

    def to_utc(self, value: datetime.datetime):
        """
        Convert datetime to UTC timezone.

        :param value: datetime
        :return: datetime in UTC
        """
        # A naive datetime is first localized to the helper's default timezone,
        # then converted recursively; an aware datetime is converted directly.
        if not value.tzinfo:
            return self.to_utc(value.replace(tzinfo=self.timezone))
        else:
            return value.astimezone(tz.utc)
|
||||
|
||||
|
||||
def get_date_helper() -> DateHelper:
    """
    Return DateHelper with proper implementation.

    If there is a 'ciso8601' than use 'ciso8601.parse_datetime' else
    use 'datetime.fromisoformat' (Python >= 3.11) or 'dateutil.parse' (Python < 3.11).
    """
    global date_helper
    # Fast path: helper already initialized, no locking required.
    if date_helper is not None:
        return date_helper

    with lock_:
        # Double-checked locking: another thread may have won the race
        # between the unlocked check above and acquiring the lock.
        if date_helper is None:
            helper = DateHelper()
            try:
                import ciso8601
                helper.parse_date = ciso8601.parse_datetime
            except ModuleNotFoundError:
                if (version_info.major, version_info.minor) >= (3, 11):
                    helper.parse_date = datetime.datetime.fromisoformat
                else:
                    helper.parse_date = parser.parse
            date_helper = helper

    return date_helper
|
||||
@@ -0,0 +1,15 @@
|
||||
"""Pandas date utils."""
|
||||
from influxdb_client.client.util.date_utils import DateHelper
|
||||
from influxdb_client.extras import pd
|
||||
|
||||
|
||||
class PandasDateTimeHelper(DateHelper):
    """DateHelper that use Pandas library with nanosecond precision."""

    def parse_date(self, date_string: str):
        """Parse date string into `class 'pandas._libs.tslibs.timestamps.Timestamp`."""
        timestamp = pd.to_datetime(date_string)
        return timestamp

    def to_nanoseconds(self, delta):
        """Get number of nanoseconds with nanos precision."""
        whole_nanos = super().to_nanoseconds(delta)
        # pandas.Timedelta exposes a sub-microsecond remainder; plain
        # datetime.timedelta does not, so default the extra part to zero.
        extra_nanos = getattr(delta, 'nanoseconds', 0)
        return whole_nanos + extra_nanos
|
||||
@@ -0,0 +1,50 @@
|
||||
"""Functions to share utility across client classes."""
|
||||
from influxdb_client.rest import ApiException
|
||||
|
||||
|
||||
def _is_id(value):
|
||||
"""
|
||||
Check if the value is valid InfluxDB ID.
|
||||
|
||||
:param value: to check
|
||||
:return: True if provided parameter is valid InfluxDB ID.
|
||||
"""
|
||||
if value and len(value) == 16:
|
||||
try:
|
||||
int(value, 16)
|
||||
return True
|
||||
except ValueError:
|
||||
return False
|
||||
return False
|
||||
|
||||
|
||||
def get_org_query_param(org, client, required_id=False):
    """
    Get required type of Org query parameter.

    :param str, Organization org: value provided as a parameter into API (optional)
    :param InfluxDBClient client: with default value for Org parameter
    :param bool required_id: true if the query param has to be a ID
    :return: request type of org query parameter or None
    """
    _org = org if org is not None else client.org
    # Unwrap an Organization domain object into its plain ID string.
    if 'Organization' in type(_org).__name__:
        _org = _org.id

    # Resolve an organization name into an ID only when the caller requires
    # an ID and the value does not already look like one.
    needs_lookup = required_id and _org and not _is_id(_org)
    if not needs_lookup:
        return _org

    try:
        organizations = client.organizations_api().find_organizations(org=_org)
        if len(organizations) < 1:
            from influxdb_client.client.exceptions import InfluxDBError
            message = f"The client cannot find organization with name: '{_org}' " \
                      "to determine their ID. Are you using token with sufficient permission?"
            raise InfluxDBError(response=None, message=message)
        return organizations[0].id
    except ApiException as e:
        if e.status == 404:
            from influxdb_client.client.exceptions import InfluxDBError
            message = f"The client cannot find organization with name: '{_org}' " \
                      "to determine their ID."
            raise InfluxDBError(response=None, message=message)
        raise e
|
||||
@@ -0,0 +1,205 @@
|
||||
"""
|
||||
Helpers classes to make easier use the client in multiprocessing environment.
|
||||
|
||||
For more information how the multiprocessing works see Python's
|
||||
`reference docs <https://docs.python.org/3/library/multiprocessing.html>`_.
|
||||
"""
|
||||
import logging
|
||||
import multiprocessing
|
||||
|
||||
from influxdb_client import InfluxDBClient, WriteOptions
|
||||
from influxdb_client.client.exceptions import InfluxDBError
|
||||
|
||||
logger = logging.getLogger('influxdb_client.client.util.multiprocessing_helper')
|
||||
|
||||
|
||||
def _success_callback(conf: (str, str, str), data: str):
    """Log a successfully written batch (default success callback)."""
    logger.debug(f"Written batch: {conf}, data: {data}")
|
||||
|
||||
|
||||
def _error_callback(conf: (str, str, str), data: str, exception: InfluxDBError):
    """Log an unsuccessfully written batch (default error callback)."""
    logger.debug(f"Cannot write batch: {conf}, data: {data} due: {exception}")
|
||||
|
||||
|
||||
def _retry_callback(conf: (str, str, str), data: str, exception: InfluxDBError):
    """Log a retryable error (default retry callback)."""
    logger.debug(f"Retryable error occurs for batch: {conf}, data: {data} retry: {exception}")
|
||||
|
||||
|
||||
class _PoisonPill:
    """Sentinel placed on the writer queue to notify the process to terminate."""

    pass
|
||||
|
||||
|
||||
class MultiprocessingWriter(multiprocessing.Process):
    """
    The Helper class to write data into InfluxDB in independent OS process.

    Example:
        .. code-block:: python

            from influxdb_client import WriteOptions
            from influxdb_client.client.util.multiprocessing_helper import MultiprocessingWriter


            def main():
                writer = MultiprocessingWriter(url="http://localhost:8086", token="my-token", org="my-org",
                                               write_options=WriteOptions(batch_size=100))
                writer.start()

                for x in range(1, 1000):
                    writer.write(bucket="my-bucket", record=f"mem,tag=a value={x}i {x}")

                writer.__del__()


            if __name__ == '__main__':
                main()


    How to use with context_manager:
        .. code-block:: python

            from influxdb_client import WriteOptions
            from influxdb_client.client.util.multiprocessing_helper import MultiprocessingWriter


            def main():
                with MultiprocessingWriter(url="http://localhost:8086", token="my-token", org="my-org",
                                           write_options=WriteOptions(batch_size=100)) as writer:
                    for x in range(1, 1000):
                        writer.write(bucket="my-bucket", record=f"mem,tag=a value={x}i {x}")


            if __name__ == '__main__':
                main()


    How to handle batch events:
        .. code-block:: python

            from influxdb_client import WriteOptions
            from influxdb_client.client.exceptions import InfluxDBError
            from influxdb_client.client.util.multiprocessing_helper import MultiprocessingWriter


            class BatchingCallback(object):

                def success(self, conf: (str, str, str), data: str):
                    print(f"Written batch: {conf}, data: {data}")

                def error(self, conf: (str, str, str), data: str, exception: InfluxDBError):
                    print(f"Cannot write batch: {conf}, data: {data} due: {exception}")

                def retry(self, conf: (str, str, str), data: str, exception: InfluxDBError):
                    print(f"Retryable error occurs for batch: {conf}, data: {data} retry: {exception}")


            def main():
                callback = BatchingCallback()
                with MultiprocessingWriter(url="http://localhost:8086", token="my-token", org="my-org",
                                           success_callback=callback.success,
                                           error_callback=callback.error,
                                           retry_callback=callback.retry) as writer:

                    for x in range(1, 1000):
                        writer.write(bucket="my-bucket", record=f"mem,tag=a value={x}i {x}")


            if __name__ == '__main__':
                main()


    """

    # Lifecycle flags: flipped by start() and __del__() in the parent process.
    __started__ = False
    __disposed__ = False

    def __init__(self, **kwargs) -> None:
        """
        Initialize defaults.

        For more information how to initialize the writer see the examples above.

        :param kwargs: arguments are passed into ``__init__`` function of ``InfluxDBClient`` and ``write_api``.
        """
        multiprocessing.Process.__init__(self)
        self.kwargs = kwargs
        # Client and Write API are created lazily in run() so they live in the
        # child process, not in the parent.
        self.client = None
        self.write_api = None
        # Manager-backed joinable queue proxy shared between parent and child.
        self.queue_ = multiprocessing.Manager().Queue()

    def write(self, **kwargs) -> None:
        """
        Append time-series data into underlying queue.

        For more information how to pass arguments see the examples above.

        :param kwargs: arguments are passed into ``write`` function of ``WriteApi``
        :return: None
        """
        assert self.__disposed__ is False, 'Cannot write data: the writer is closed.'
        assert self.__started__ is True, 'Cannot write data: the writer is not started.'
        self.queue_.put(kwargs)

    def run(self):
        """Initialize ``InfluxDBClient`` and wait for data to write into InfluxDB (runs in the child process)."""
        # Initialize Client and Write API
        self.client = InfluxDBClient(**self.kwargs)
        # Callbacks fall back to the module-level logging defaults when the
        # caller did not supply their own.
        self.write_api = self.client.write_api(write_options=self.kwargs.get('write_options', WriteOptions()),
                                               success_callback=self.kwargs.get('success_callback', _success_callback),
                                               error_callback=self.kwargs.get('error_callback', _error_callback),
                                               retry_callback=self.kwargs.get('retry_callback', _retry_callback))
        # Infinite loop - until poison pill
        while True:
            next_record = self.queue_.get()
            if type(next_record) is _PoisonPill:
                # Poison pill means break the loop
                self.terminate()
                self.queue_.task_done()
                break
            self.write_api.write(**next_record)
            self.queue_.task_done()

    def start(self) -> None:
        """Start independent process for writing data into InfluxDB."""
        super().start()
        self.__started__ = True

    def terminate(self) -> None:
        """
        Cleanup resources in independent process.

        This function **cannot be used** to terminate the ``MultiprocessingWriter``.
        If you want to finish your writes please call: ``__del__``.
        """
        # Disposing the Write API flushes any buffered batches before close.
        if self.write_api:
            logger.info("flushing data...")
            self.write_api.__del__()
            self.write_api = None
        if self.client:
            self.client.__del__()
            self.client = None
        logger.info("closed")

    def __enter__(self):
        """Enter the runtime context related to this object."""
        self.start()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        """Exit the runtime context related to this object."""
        self.__del__()

    def __del__(self):
        """Dispose the client and write_api."""
        if self.__started__:
            # Signal the child to finish, wait until every queued record is
            # processed, then wait for the process itself to exit.
            self.queue_.put(_PoisonPill())
            self.queue_.join()
            self.join()
            self.queue_ = None
        self.__started__ = False
        self.__disposed__ = True
|
||||
Reference in New Issue
Block a user