Updates
@@ -0,0 +1,27 @@
from .base import (
    AsyncPushNotificationsParser,
    BaseParser,
    PushNotificationsParser,
    _AsyncRESPBase,
)
from .commands import AsyncCommandsParser, CommandsParser
from .encoders import Encoder
from .hiredis import _AsyncHiredisParser, _HiredisParser
from .resp2 import _AsyncRESP2Parser, _RESP2Parser
from .resp3 import _AsyncRESP3Parser, _RESP3Parser

__all__ = [
    "AsyncCommandsParser",
    "_AsyncHiredisParser",
    "_AsyncRESPBase",
    "_AsyncRESP2Parser",
    "_AsyncRESP3Parser",
    "AsyncPushNotificationsParser",
    "CommandsParser",
    "Encoder",
    "BaseParser",
    "_HiredisParser",
    "_RESP2Parser",
    "_RESP3Parser",
    "PushNotificationsParser",
]
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
289
ETB-API/venv/lib/python3.12/site-packages/redis/_parsers/base.py
Normal file
@@ -0,0 +1,289 @@
import sys
from abc import ABC
from asyncio import IncompleteReadError, StreamReader, TimeoutError
from typing import Callable, List, Optional, Protocol, Union

if sys.version_info.major >= 3 and sys.version_info.minor >= 11:
    from asyncio import timeout as async_timeout
else:
    from async_timeout import timeout as async_timeout

from ..exceptions import (
    AskError,
    AuthenticationError,
    AuthenticationWrongNumberOfArgsError,
    BusyLoadingError,
    ClusterCrossSlotError,
    ClusterDownError,
    ConnectionError,
    ExecAbortError,
    MasterDownError,
    ModuleError,
    MovedError,
    NoPermissionError,
    NoScriptError,
    OutOfMemoryError,
    ReadOnlyError,
    RedisError,
    ResponseError,
    TryAgainError,
)
from ..typing import EncodableT
from .encoders import Encoder
from .socket import SERVER_CLOSED_CONNECTION_ERROR, SocketBuffer

MODULE_LOAD_ERROR = "Error loading the extension. Please check the server logs."
NO_SUCH_MODULE_ERROR = "Error unloading module: no such module with that name"
MODULE_UNLOAD_NOT_POSSIBLE_ERROR = "Error unloading module: operation not possible."
MODULE_EXPORTS_DATA_TYPES_ERROR = (
    "Error unloading module: the module "
    "exports one or more module-side data "
    "types, can't unload"
)
# user sends an AUTH cmd to a server without authorization configured
NO_AUTH_SET_ERROR = {
    # Redis >= 6.0
    "AUTH <password> called without any password "
    "configured for the default user. Are you sure "
    "your configuration is correct?": AuthenticationError,
    # Redis < 6.0
    "Client sent AUTH, but no password is set": AuthenticationError,
}


class BaseParser(ABC):
    EXCEPTION_CLASSES = {
        "ERR": {
            "max number of clients reached": ConnectionError,
            "invalid password": AuthenticationError,
            # some Redis server versions report invalid command syntax
            # in lowercase
            "wrong number of arguments "
            "for 'auth' command": AuthenticationWrongNumberOfArgsError,
            # some Redis server versions report invalid command syntax
            # in uppercase
            "wrong number of arguments "
            "for 'AUTH' command": AuthenticationWrongNumberOfArgsError,
            MODULE_LOAD_ERROR: ModuleError,
            MODULE_EXPORTS_DATA_TYPES_ERROR: ModuleError,
            NO_SUCH_MODULE_ERROR: ModuleError,
            MODULE_UNLOAD_NOT_POSSIBLE_ERROR: ModuleError,
            **NO_AUTH_SET_ERROR,
        },
        "OOM": OutOfMemoryError,
        "WRONGPASS": AuthenticationError,
        "EXECABORT": ExecAbortError,
        "LOADING": BusyLoadingError,
        "NOSCRIPT": NoScriptError,
        "READONLY": ReadOnlyError,
        "NOAUTH": AuthenticationError,
        "NOPERM": NoPermissionError,
        "ASK": AskError,
        "TRYAGAIN": TryAgainError,
        "MOVED": MovedError,
        "CLUSTERDOWN": ClusterDownError,
        "CROSSSLOT": ClusterCrossSlotError,
        "MASTERDOWN": MasterDownError,
    }

    @classmethod
    def parse_error(cls, response):
        "Parse an error response"
        error_code = response.split(" ")[0]
        if error_code in cls.EXCEPTION_CLASSES:
            response = response[len(error_code) + 1 :]
            exception_class = cls.EXCEPTION_CLASSES[error_code]
            if isinstance(exception_class, dict):
                exception_class = exception_class.get(response, ResponseError)
            return exception_class(response)
        return ResponseError(response)

    def on_disconnect(self):
        raise NotImplementedError()

    def on_connect(self, connection):
        raise NotImplementedError()
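A quick illustration of the error-code mapping above (a sketch, not part of the diff; the error strings are sample server replies, and it assumes redis-py's exception classes are importable):

from redis._parsers.base import BaseParser

err = BaseParser.parse_error("MOVED 3999 127.0.0.1:6381")
print(type(err).__name__)   # MovedError (cluster redirect, code looked up directly)

err = BaseParser.parse_error("ERR max number of clients reached")
print(type(err).__name__)   # ConnectionError (matched inside the "ERR" sub-table)

# Unknown codes fall through to the generic ResponseError
err = BaseParser.parse_error("WRONGTYPE Operation against a key")
print(type(err).__name__)   # ResponseError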


class _RESPBase(BaseParser):
    """Base class for sync-based resp parsing"""

    def __init__(self, socket_read_size):
        self.socket_read_size = socket_read_size
        self.encoder = None
        self._sock = None
        self._buffer = None

    def __del__(self):
        try:
            self.on_disconnect()
        except Exception:
            pass

    def on_connect(self, connection):
        "Called when the socket connects"
        self._sock = connection._sock
        self._buffer = SocketBuffer(
            self._sock, self.socket_read_size, connection.socket_timeout
        )
        self.encoder = connection.encoder

    def on_disconnect(self):
        "Called when the socket disconnects"
        self._sock = None
        if self._buffer is not None:
            self._buffer.close()
            self._buffer = None
        self.encoder = None

    def can_read(self, timeout):
        return self._buffer and self._buffer.can_read(timeout)


class AsyncBaseParser(BaseParser):
    """Base parsing class for the python-backed async parser"""

    __slots__ = "_stream", "_read_size"

    def __init__(self, socket_read_size: int):
        self._stream: Optional[StreamReader] = None
        self._read_size = socket_read_size

    async def can_read_destructive(self) -> bool:
        raise NotImplementedError()

    async def read_response(
        self, disable_decoding: bool = False
    ) -> Union[EncodableT, ResponseError, None, List[EncodableT]]:
        raise NotImplementedError()


_INVALIDATION_MESSAGE = [b"invalidate", "invalidate"]


class PushNotificationsParser(Protocol):
    """Protocol defining RESP3-specific parsing functionality"""

    pubsub_push_handler_func: Callable
    invalidation_push_handler_func: Optional[Callable] = None

    def handle_pubsub_push_response(self, response):
        """Handle pubsub push responses"""
        raise NotImplementedError()

    def handle_push_response(self, response, **kwargs):
        if response[0] not in _INVALIDATION_MESSAGE:
            return self.pubsub_push_handler_func(response)
        if self.invalidation_push_handler_func:
            return self.invalidation_push_handler_func(response)

    def set_pubsub_push_handler(self, pubsub_push_handler_func):
        self.pubsub_push_handler_func = pubsub_push_handler_func

    def set_invalidation_push_handler(self, invalidation_push_handler_func):
        self.invalidation_push_handler_func = invalidation_push_handler_func
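How the routing in handle_push_response plays out, as a minimal sketch (the demo class and handlers are hypothetical; only frames whose first element is `invalidate` go to the invalidation handler):

from redis._parsers.base import PushNotificationsParser

class DemoParser(PushNotificationsParser):
    def __init__(self):
        # plain callables stored as instance attributes, as the protocol expects
        self.pubsub_push_handler_func = lambda r: ("pubsub", r)
        self.invalidation_push_handler_func = None

p = DemoParser()
p.set_invalidation_push_handler(lambda r: ("invalidate", r))
print(p.handle_push_response([b"message", b"ch", b"hi"])[0])   # pubsub
print(p.handle_push_response([b"invalidate", [b"key1"]])[0])   # invalidate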


class AsyncPushNotificationsParser(Protocol):
    """Protocol defining async RESP3-specific parsing functionality"""

    pubsub_push_handler_func: Callable
    invalidation_push_handler_func: Optional[Callable] = None

    async def handle_pubsub_push_response(self, response):
        """Handle pubsub push responses asynchronously"""
        raise NotImplementedError()

    async def handle_push_response(self, response, **kwargs):
        """Handle push responses asynchronously"""
        if response[0] not in _INVALIDATION_MESSAGE:
            return await self.pubsub_push_handler_func(response)
        if self.invalidation_push_handler_func:
            return await self.invalidation_push_handler_func(response)

    def set_pubsub_push_handler(self, pubsub_push_handler_func):
        """Set the pubsub push handler function"""
        self.pubsub_push_handler_func = pubsub_push_handler_func

    def set_invalidation_push_handler(self, invalidation_push_handler_func):
        """Set the invalidation push handler function"""
        self.invalidation_push_handler_func = invalidation_push_handler_func


class _AsyncRESPBase(AsyncBaseParser):
    """Base class for async resp parsing"""

    __slots__ = AsyncBaseParser.__slots__ + ("encoder", "_buffer", "_pos", "_chunks")

    def __init__(self, socket_read_size: int):
        super().__init__(socket_read_size)
        self.encoder: Optional[Encoder] = None
        self._buffer = b""
        self._chunks = []
        self._pos = 0

    def _clear(self):
        self._buffer = b""
        self._chunks.clear()

    def on_connect(self, connection):
        """Called when the stream connects"""
        self._stream = connection._reader
        if self._stream is None:
            raise RedisError("Buffer is closed.")
        self.encoder = connection.encoder
        self._clear()
        self._connected = True

    def on_disconnect(self):
        """Called when the stream disconnects"""
        self._connected = False

    async def can_read_destructive(self) -> bool:
        if not self._connected:
            raise RedisError("Buffer is closed.")
        if self._buffer:
            return True
        try:
            async with async_timeout(0):
                return self._stream.at_eof()
        except TimeoutError:
            return False

    async def _read(self, length: int) -> bytes:
        """
        Read `length` bytes of data. These are assumed to be followed
        by a '\r\n' terminator which is subsequently discarded.
        """
        want = length + 2
        end = self._pos + want
        if len(self._buffer) >= end:
            result = self._buffer[self._pos : end - 2]
        else:
            tail = self._buffer[self._pos :]
            try:
                data = await self._stream.readexactly(want - len(tail))
            except IncompleteReadError as error:
                raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) from error
            result = (tail + data)[:-2]
            self._chunks.append(data)
        self._pos += want
        return result

    async def _readline(self) -> bytes:
        """
        read an unknown number of bytes up to the next '\r\n'
        line separator, which is discarded.
        """
        found = self._buffer.find(b"\r\n", self._pos)
        if found >= 0:
            result = self._buffer[self._pos : found]
        else:
            tail = self._buffer[self._pos :]
            data = await self._stream.readline()
            if not data.endswith(b"\r\n"):
                raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
            result = (tail + data)[:-2]
            self._chunks.append(data)
        self._pos += len(result) + 2
        return result
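For context on what _read()/_readline() consume: a sketch of the RESP bulk-string framing they assume (plain Python, no redis dependency):

# "$<len>\r\n<payload>\r\n" -- _readline() yields the header up to the first
# CRLF; _read(len) then yields exactly <len> payload bytes and discards the CRLF.
wire = b"$5\r\nhello\r\n"
header = wire[: wire.find(b"\r\n")]        # b"$5"
length = int(header[1:])                   # 5
start = len(header) + 2
payload = wire[start : start + length]     # b"hello"
assert payload == b"hello"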
@@ -0,0 +1,281 @@
from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union

from redis.exceptions import RedisError, ResponseError
from redis.utils import str_if_bytes

if TYPE_CHECKING:
    from redis.asyncio.cluster import ClusterNode


class AbstractCommandsParser:
    def _get_pubsub_keys(self, *args):
        """
        Get the keys from a pubsub command.
        Although PubSub commands have predetermined key locations, they are not
        supported in the 'COMMAND's output, so the key positions are hardcoded
        in this method
        """
        if len(args) < 2:
            # The command has no keys in it
            return None
        args = [str_if_bytes(arg) for arg in args]
        command = args[0].upper()
        keys = None
        if command == "PUBSUB":
            # the second argument is a part of the command name, e.g.
            # ['PUBSUB', 'NUMSUB', 'foo'].
            pubsub_type = args[1].upper()
            if pubsub_type in ["CHANNELS", "NUMSUB", "SHARDCHANNELS", "SHARDNUMSUB"]:
                keys = args[2:]
        elif command in ["SUBSCRIBE", "PSUBSCRIBE", "UNSUBSCRIBE", "PUNSUBSCRIBE"]:
            # format example:
            # SUBSCRIBE channel [channel ...]
            keys = list(args[1:])
        elif command in ["PUBLISH", "SPUBLISH"]:
            # format example:
            # PUBLISH channel message
            keys = [args[1]]
        return keys

    def parse_subcommand(self, command, **options):
        cmd_dict = {}
        cmd_name = str_if_bytes(command[0])
        cmd_dict["name"] = cmd_name
        cmd_dict["arity"] = int(command[1])
        cmd_dict["flags"] = [str_if_bytes(flag) for flag in command[2]]
        cmd_dict["first_key_pos"] = command[3]
        cmd_dict["last_key_pos"] = command[4]
        cmd_dict["step_count"] = command[5]
        if len(command) > 7:
            cmd_dict["tips"] = command[7]
            cmd_dict["key_specifications"] = command[8]
            cmd_dict["subcommands"] = command[9]
        return cmd_dict
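A sketch of the hardcoded pubsub key positions (assumes redis-py's AbstractCommandsParser is importable; the arguments are sample values):

from redis._parsers.commands import AbstractCommandsParser

p = AbstractCommandsParser()
print(p._get_pubsub_keys("SUBSCRIBE", "news", "sports"))   # ['news', 'sports']
print(p._get_pubsub_keys("PUBLISH", "news", "hello"))      # ['news']
print(p._get_pubsub_keys("PUBSUB", "NUMSUB", "news"))      # ['news']
print(p._get_pubsub_keys("GET"))                           # None (fewer than 2 args)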


class CommandsParser(AbstractCommandsParser):
    """
    Parses Redis commands to get command keys.
    COMMAND output is used to determine key locations.
    Commands that do not have a predefined key location are flagged with
    'movablekeys', and these commands' keys are determined by the command
    'COMMAND GETKEYS'.
    """

    def __init__(self, redis_connection):
        self.commands = {}
        self.initialize(redis_connection)

    def initialize(self, r):
        commands = r.command()
        uppercase_commands = []
        for cmd in commands:
            if any(x.isupper() for x in cmd):
                uppercase_commands.append(cmd)
        for cmd in uppercase_commands:
            commands[cmd.lower()] = commands.pop(cmd)
        self.commands = commands

    # As soon as this PR is merged into Redis, we should reimplement
    # our logic to use COMMAND INFO changes to determine the key positions
    # https://github.com/redis/redis/pull/8324
    def get_keys(self, redis_conn, *args):
        """
        Get the keys from the passed command.

        NOTE: Due to a bug in redis<7.0, this function does not work properly
        for EVAL or EVALSHA when the `numkeys` arg is 0.
         - issue: https://github.com/redis/redis/issues/9493
         - fix: https://github.com/redis/redis/pull/9733

        So, don't use this function with EVAL or EVALSHA.
        """
        if len(args) < 2:
            # The command has no keys in it
            return None

        cmd_name = args[0].lower()
        if cmd_name not in self.commands:
            # try to split the command name and to take only the main command,
            # e.g. 'memory' for 'memory usage'
            cmd_name_split = cmd_name.split()
            cmd_name = cmd_name_split[0]
            if cmd_name in self.commands:
                # save the split command to args
                args = cmd_name_split + list(args[1:])
            else:
                # We'll try to reinitialize the commands cache: if the engine
                # version has changed, the commands may not be current
                self.initialize(redis_conn)
                if cmd_name not in self.commands:
                    raise RedisError(
                        f"{cmd_name.upper()} command doesn't exist in Redis commands"
                    )

        command = self.commands.get(cmd_name)
        if "movablekeys" in command["flags"]:
            keys = self._get_moveable_keys(redis_conn, *args)
        elif "pubsub" in command["flags"] or command["name"] == "pubsub":
            keys = self._get_pubsub_keys(*args)
        else:
            if (
                command["step_count"] == 0
                and command["first_key_pos"] == 0
                and command["last_key_pos"] == 0
            ):
                is_subcmd = False
                if "subcommands" in command:
                    subcmd_name = f"{cmd_name}|{args[1].lower()}"
                    for subcmd in command["subcommands"]:
                        if str_if_bytes(subcmd[0]) == subcmd_name:
                            command = self.parse_subcommand(subcmd)
                            is_subcmd = True

                # The command doesn't have keys in it
                if not is_subcmd:
                    return None

            last_key_pos = command["last_key_pos"]
            if last_key_pos < 0:
                last_key_pos = len(args) - abs(last_key_pos)
            keys_pos = list(
                range(command["first_key_pos"], last_key_pos + 1, command["step_count"])
            )
            keys = [args[pos] for pos in keys_pos]

        return keys

    def _get_moveable_keys(self, redis_conn, *args):
        """
        NOTE: Due to a bug in redis<7.0, this function does not work properly
        for EVAL or EVALSHA when the `numkeys` arg is 0.
         - issue: https://github.com/redis/redis/issues/9493
         - fix: https://github.com/redis/redis/pull/9733

        So, don't use this function with EVAL or EVALSHA.
        """
        # The command name should be split into separate arguments,
        # e.g. 'MEMORY USAGE' will be split into ['MEMORY', 'USAGE']
        pieces = args[0].split() + list(args[1:])
        try:
            keys = redis_conn.execute_command("COMMAND GETKEYS", *pieces)
        except ResponseError as e:
            message = e.__str__()
            if (
                "Invalid arguments" in message
                or "The command has no key arguments" in message
            ):
                return None
            else:
                raise e
        return keys


class AsyncCommandsParser(AbstractCommandsParser):
    """
    Parses Redis commands to get command keys.

    COMMAND output is used to determine key locations.
    Commands that do not have a predefined key location are flagged with 'movablekeys',
    and these commands' keys are determined by the command 'COMMAND GETKEYS'.

    NOTE: Due to a bug in redis<7.0, this does not work properly
    for EVAL or EVALSHA when the `numkeys` arg is 0.
     - issue: https://github.com/redis/redis/issues/9493
     - fix: https://github.com/redis/redis/pull/9733

    So, don't use this with EVAL or EVALSHA.
    """

    __slots__ = ("commands", "node")

    def __init__(self) -> None:
        self.commands: Dict[str, Union[int, Dict[str, Any]]] = {}

    async def initialize(self, node: Optional["ClusterNode"] = None) -> None:
        if node:
            self.node = node

        commands = await self.node.execute_command("COMMAND")
        self.commands = {cmd.lower(): command for cmd, command in commands.items()}

    # As soon as this PR is merged into Redis, we should reimplement
    # our logic to use COMMAND INFO changes to determine the key positions
    # https://github.com/redis/redis/pull/8324
    async def get_keys(self, *args: Any) -> Optional[Tuple[str, ...]]:
        """
        Get the keys from the passed command.

        NOTE: Due to a bug in redis<7.0, this function does not work properly
        for EVAL or EVALSHA when the `numkeys` arg is 0.
         - issue: https://github.com/redis/redis/issues/9493
         - fix: https://github.com/redis/redis/pull/9733

        So, don't use this function with EVAL or EVALSHA.
        """
        if len(args) < 2:
            # The command has no keys in it
            return None

        cmd_name = args[0].lower()
        if cmd_name not in self.commands:
            # try to split the command name and to take only the main command,
            # e.g. 'memory' for 'memory usage'
            cmd_name_split = cmd_name.split()
            cmd_name = cmd_name_split[0]
            if cmd_name in self.commands:
                # save the split command to args
                args = cmd_name_split + list(args[1:])
            else:
                # We'll try to reinitialize the commands cache: if the engine
                # version has changed, the commands may not be current
                await self.initialize()
                if cmd_name not in self.commands:
                    raise RedisError(
                        f"{cmd_name.upper()} command doesn't exist in Redis commands"
                    )

        command = self.commands.get(cmd_name)
        if "movablekeys" in command["flags"]:
            keys = await self._get_moveable_keys(*args)
        elif "pubsub" in command["flags"] or command["name"] == "pubsub":
            keys = self._get_pubsub_keys(*args)
        else:
            if (
                command["step_count"] == 0
                and command["first_key_pos"] == 0
                and command["last_key_pos"] == 0
            ):
                is_subcmd = False
                if "subcommands" in command:
                    subcmd_name = f"{cmd_name}|{args[1].lower()}"
                    for subcmd in command["subcommands"]:
                        if str_if_bytes(subcmd[0]) == subcmd_name:
                            command = self.parse_subcommand(subcmd)
                            is_subcmd = True

                # The command doesn't have keys in it
                if not is_subcmd:
                    return None

            last_key_pos = command["last_key_pos"]
            if last_key_pos < 0:
                last_key_pos = len(args) - abs(last_key_pos)
            keys_pos = list(
                range(command["first_key_pos"], last_key_pos + 1, command["step_count"])
            )
            keys = [args[pos] for pos in keys_pos]

        return keys

    async def _get_moveable_keys(self, *args: Any) -> Optional[Tuple[str, ...]]:
        try:
            keys = await self.node.execute_command("COMMAND GETKEYS", *args)
        except ResponseError as e:
            message = e.__str__()
            if (
                "Invalid arguments" in message
                or "The command has no key arguments" in message
            ):
                return None
            else:
                raise e
        return keys
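Usage sketch for the sync parser, assuming a reachable Redis server (host/port are placeholders):

import redis
from redis._parsers import CommandsParser

r = redis.Redis(host="localhost", port=6379)
parser = CommandsParser(r)
print(parser.get_keys(r, "SET", "k", "v"))                  # ['k']
print(parser.get_keys(r, "MSET", "k1", "v1", "k2", "v2"))   # ['k1', 'k2']
# 'movablekeys' commands (e.g. GEORADIUS ... STORE) fall back to
# COMMAND GETKEYS on the server instead of local position math.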
@@ -0,0 +1,44 @@
from ..exceptions import DataError


class Encoder:
    "Encode strings to bytes-like and decode bytes-like to strings"

    __slots__ = "encoding", "encoding_errors", "decode_responses"

    def __init__(self, encoding, encoding_errors, decode_responses):
        self.encoding = encoding
        self.encoding_errors = encoding_errors
        self.decode_responses = decode_responses

    def encode(self, value):
        "Return a bytestring or bytes-like representation of the value"
        if isinstance(value, (bytes, memoryview)):
            return value
        elif isinstance(value, bool):
            # special case bool since it is a subclass of int
            raise DataError(
                "Invalid input of type: 'bool'. Convert to a "
                "bytes, string, int or float first."
            )
        elif isinstance(value, (int, float)):
            value = repr(value).encode()
        elif not isinstance(value, str):
            # a value we don't know how to deal with. throw an error
            typename = type(value).__name__
            raise DataError(
                f"Invalid input of type: '{typename}'. "
                f"Convert to a bytes, string, int or float first."
            )
        if isinstance(value, str):
            value = value.encode(self.encoding, self.encoding_errors)
        return value

    def decode(self, value, force=False):
        "Return a unicode string from the bytes-like representation"
        if self.decode_responses or force:
            if isinstance(value, memoryview):
                value = value.tobytes()
            if isinstance(value, bytes):
                value = value.decode(self.encoding, self.encoding_errors)
        return value
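Usage sketch for Encoder (the values are illustrative):

from redis._parsers.encoders import Encoder
from redis.exceptions import DataError

enc = Encoder(encoding="utf-8", encoding_errors="strict", decode_responses=True)
assert enc.encode("héllo") == "héllo".encode("utf-8")
assert enc.encode(42) == b"42"            # ints/floats go through repr()
assert enc.decode(b"ok") == "ok"          # decoded since decode_responses=True
try:
    enc.encode(True)                       # bool is rejected explicitly
except DataError as e:
    print(e)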
@@ -0,0 +1,883 @@
import datetime

from redis.utils import str_if_bytes


def timestamp_to_datetime(response):
    "Converts a unix timestamp to a Python datetime object"
    if not response:
        return None
    try:
        response = int(response)
    except ValueError:
        return None
    return datetime.datetime.fromtimestamp(response)


def parse_debug_object(response):
    "Parse the results of Redis's DEBUG OBJECT command into a Python dict"
    # The 'type' of the object is the first item in the response, but isn't
    # prefixed with a name
    response = str_if_bytes(response)
    response = "type:" + response
    response = dict(kv.split(":") for kv in response.split())

    # parse some expected int values from the string response
    # note: this cmd isn't spec'd so these may not appear in all redis versions
    int_fields = ("refcount", "serializedlength", "lru", "lru_seconds_idle")
    for field in int_fields:
        if field in response:
            response[field] = int(response[field])

    return response


def parse_info(response):
    """Parse the result of Redis's INFO command into a Python dict"""
    info = {}
    response = str_if_bytes(response)

    def get_value(value):
        if "," not in value and "=" not in value:
            try:
                if "." in value:
                    return float(value)
                else:
                    return int(value)
            except ValueError:
                return value
        elif "=" not in value:
            return [get_value(v) for v in value.split(",") if v]
        else:
            sub_dict = {}
            for item in value.split(","):
                if not item:
                    continue
                if "=" in item:
                    k, v = item.rsplit("=", 1)
                    sub_dict[k] = get_value(v)
                else:
                    sub_dict[item] = True
            return sub_dict

    for line in response.splitlines():
        if line and not line.startswith("#"):
            if line.find(":") != -1:
                # Split the info fields into keys and values.
                # Note that the value may contain ':', but the 'host:'
                # pseudo-command is the only case where the key contains ':'
                key, value = line.split(":", 1)
                if key == "cmdstat_host":
                    key, value = line.rsplit(":", 1)

                if key == "module":
                    # Hardcode a list for key 'modules' since there could be
                    # multiple lines that start with 'module'
                    info.setdefault("modules", []).append(get_value(value))
                else:
                    info[key] = get_value(value)
            else:
                # if the line isn't splittable, append it to the "__raw__" key
                info.setdefault("__raw__", []).append(line)

    return info
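A sketch of parse_info on a fabricated INFO fragment (not real server output): scalar values are coerced, and 'k=v' CSV lines become nested dicts.

from redis._parsers.helpers import parse_info

sample = (
    "# Server\r\n"
    "redis_version:7.2.4\r\n"
    "connected_clients:1\r\n"
    "db0:keys=2,expires=0,avg_ttl=0\r\n"
)
info = parse_info(sample)
assert info["connected_clients"] == 1
assert info["redis_version"] == "7.2.4"   # stays a string: float() fails on "7.2.4"
assert info["db0"] == {"keys": 2, "expires": 0, "avg_ttl": 0}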


def parse_memory_stats(response, **kwargs):
    """Parse the results of MEMORY STATS"""
    stats = pairs_to_dict(response, decode_keys=True, decode_string_values=True)
    for key, value in stats.items():
        if key.startswith("db.") and isinstance(value, list):
            stats[key] = pairs_to_dict(
                value, decode_keys=True, decode_string_values=True
            )
    return stats


SENTINEL_STATE_TYPES = {
    "can-failover-its-master": int,
    "config-epoch": int,
    "down-after-milliseconds": int,
    "failover-timeout": int,
    "info-refresh": int,
    "last-hello-message": int,
    "last-ok-ping-reply": int,
    "last-ping-reply": int,
    "last-ping-sent": int,
    "master-link-down-time": int,
    "master-port": int,
    "num-other-sentinels": int,
    "num-slaves": int,
    "o-down-time": int,
    "pending-commands": int,
    "parallel-syncs": int,
    "port": int,
    "quorum": int,
    "role-reported-time": int,
    "s-down-time": int,
    "slave-priority": int,
    "slave-repl-offset": int,
    "voted-leader-epoch": int,
}


def parse_sentinel_state(item):
    result = pairs_to_dict_typed(item, SENTINEL_STATE_TYPES)
    flags = set(result["flags"].split(","))
    for name, flag in (
        ("is_master", "master"),
        ("is_slave", "slave"),
        ("is_sdown", "s_down"),
        ("is_odown", "o_down"),
        ("is_sentinel", "sentinel"),
        ("is_disconnected", "disconnected"),
        ("is_master_down", "master_down"),
    ):
        result[name] = flag in flags
    return result


def parse_sentinel_master(response):
    return parse_sentinel_state(map(str_if_bytes, response))


def parse_sentinel_state_resp3(response):
    result = {}
    for key in response:
        try:
            value = SENTINEL_STATE_TYPES[key](str_if_bytes(response[key]))
            result[str_if_bytes(key)] = value
        except Exception:
            result[str_if_bytes(key)] = response[str_if_bytes(key)]
    flags = set(result["flags"].split(","))
    result["flags"] = flags
    return result


def parse_sentinel_masters(response):
    result = {}
    for item in response:
        state = parse_sentinel_state(map(str_if_bytes, item))
        result[state["name"]] = state
    return result


def parse_sentinel_masters_resp3(response):
    return [parse_sentinel_state(master) for master in response]


def parse_sentinel_slaves_and_sentinels(response):
    return [parse_sentinel_state(map(str_if_bytes, item)) for item in response]


def parse_sentinel_slaves_and_sentinels_resp3(response):
    return [parse_sentinel_state_resp3(item) for item in response]


def parse_sentinel_get_master(response):
    return response and (response[0], int(response[1])) or None


def pairs_to_dict(response, decode_keys=False, decode_string_values=False):
    """Create a dict given a list of key/value pairs"""
    if response is None:
        return {}
    if decode_keys or decode_string_values:
        # the iter form is faster, but I don't know how to make that work
        # with a str_if_bytes() map
        keys = response[::2]
        if decode_keys:
            keys = map(str_if_bytes, keys)
        values = response[1::2]
        if decode_string_values:
            values = map(str_if_bytes, values)
        return dict(zip(keys, values))
    else:
        it = iter(response)
        return dict(zip(it, it))
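Sketch of the flat-pairs convention pairs_to_dict handles (a sample reply shaped like HGETALL output):

from redis._parsers.helpers import pairs_to_dict

resp = [b"name", b"alice", b"age", b"30"]
assert pairs_to_dict(resp) == {b"name": b"alice", b"age": b"30"}
assert pairs_to_dict(resp, decode_keys=True) == {"name": b"alice", "age": b"30"}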


def pairs_to_dict_typed(response, type_info):
    it = iter(response)
    result = {}
    for key, value in zip(it, it):
        if key in type_info:
            try:
                value = type_info[key](value)
            except Exception:
                # if for some reason the value can't be coerced, just use
                # the string value
                pass
        result[key] = value
    return result


def zset_score_pairs(response, **options):
    """
    If ``withscores`` is specified in the options, return the response as
    a list of (value, score) pairs
    """
    if not response or not options.get("withscores"):
        return response
    score_cast_func = options.get("score_cast_func", float)
    it = iter(response)
    return list(zip(it, map(score_cast_func, it)))


def sort_return_tuples(response, **options):
    """
    If ``groups`` is specified, return the response as a list of
    n-element tuples with n being the value found in options['groups']
    """
    if not response or not options.get("groups"):
        return response
    n = options["groups"]
    return list(zip(*[response[i::n] for i in range(n)]))
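The zip trick above, on sample data: with groups=n, a flat reply is regrouped into n-tuples.

from redis._parsers.helpers import sort_return_tuples

flat = [b"u1", b"9", b"u2", b"7"]
assert sort_return_tuples(flat, groups=2) == [(b"u1", b"9"), (b"u2", b"7")]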


def parse_stream_list(response):
    if response is None:
        return None
    data = []
    for r in response:
        if r is not None:
            data.append((r[0], pairs_to_dict(r[1])))
        else:
            data.append((None, None))
    return data


def pairs_to_dict_with_str_keys(response):
    return pairs_to_dict(response, decode_keys=True)


def parse_list_of_dicts(response):
    return list(map(pairs_to_dict_with_str_keys, response))


def parse_xclaim(response, **options):
    if options.get("parse_justid", False):
        return response
    return parse_stream_list(response)


def parse_xautoclaim(response, **options):
    if options.get("parse_justid", False):
        return response[1]
    response[1] = parse_stream_list(response[1])
    return response


def parse_xinfo_stream(response, **options):
    if isinstance(response, list):
        data = pairs_to_dict(response, decode_keys=True)
    else:
        data = {str_if_bytes(k): v for k, v in response.items()}
    if not options.get("full", False):
        first = data.get("first-entry")
        if first is not None and first[0] is not None:
            data["first-entry"] = (first[0], pairs_to_dict(first[1]))
        last = data["last-entry"]
        if last is not None and last[0] is not None:
            data["last-entry"] = (last[0], pairs_to_dict(last[1]))
    else:
        data["entries"] = {_id: pairs_to_dict(entry) for _id, entry in data["entries"]}
        if len(data["groups"]) > 0 and isinstance(data["groups"][0], list):
            data["groups"] = [
                pairs_to_dict(group, decode_keys=True) for group in data["groups"]
            ]
            for g in data["groups"]:
                if g["consumers"] and g["consumers"][0] is not None:
                    g["consumers"] = [
                        pairs_to_dict(c, decode_keys=True) for c in g["consumers"]
                    ]
        else:
            data["groups"] = [
                {str_if_bytes(k): v for k, v in group.items()}
                for group in data["groups"]
            ]
    return data


def parse_xread(response):
    if response is None:
        return []
    return [[r[0], parse_stream_list(r[1])] for r in response]


def parse_xread_resp3(response):
    if response is None:
        return {}
    return {key: [parse_stream_list(value)] for key, value in response.items()}


def parse_xpending(response, **options):
    if options.get("parse_detail", False):
        return parse_xpending_range(response)
    consumers = [{"name": n, "pending": int(p)} for n, p in response[3] or []]
    return {
        "pending": response[0],
        "min": response[1],
        "max": response[2],
        "consumers": consumers,
    }


def parse_xpending_range(response):
    k = ("message_id", "consumer", "time_since_delivered", "times_delivered")
    return [dict(zip(k, r)) for r in response]


def float_or_none(response):
    if response is None:
        return None
    return float(response)


def bool_ok(response, **options):
    return str_if_bytes(response) == "OK"


def parse_zadd(response, **options):
    if response is None:
        return None
    if options.get("as_score"):
        return float(response)
    return int(response)


def parse_client_list(response, **options):
    clients = []
    for c in str_if_bytes(response).splitlines():
        # Values might contain '='
        clients.append(dict(pair.split("=", 1) for pair in c.split(" ")))
    return clients


def parse_config_get(response, **options):
    response = [str_if_bytes(i) if i is not None else None for i in response]
    return response and pairs_to_dict(response) or {}


def parse_scan(response, **options):
    cursor, r = response
    return int(cursor), r


def parse_hscan(response, **options):
    cursor, r = response
    no_values = options.get("no_values", False)
    if no_values:
        payload = r or []
    else:
        payload = r and pairs_to_dict(r) or {}
    return int(cursor), payload


def parse_zscan(response, **options):
    score_cast_func = options.get("score_cast_func", float)
    cursor, r = response
    it = iter(r)
    return int(cursor), list(zip(it, map(score_cast_func, it)))


def parse_zmscore(response, **options):
    # zmscore: list of scores (double precision floating point number) or nil
    return [float(score) if score is not None else None for score in response]


def parse_slowlog_get(response, **options):
    space = " " if options.get("decode_responses", False) else b" "

    def parse_item(item):
        result = {"id": item[0], "start_time": int(item[1]), "duration": int(item[2])}
        # Redis Enterprise injects another entry at index [3], which has
        # the complexity info (i.e. the value N in case the command has
        # an O(N) complexity) instead of the command.
        if isinstance(item[3], list):
            result["command"] = space.join(item[3])

            # These fields are optional, depending on the environment.
            if len(item) >= 6:
                result["client_address"] = item[4]
                result["client_name"] = item[5]
        else:
            result["complexity"] = item[3]
            result["command"] = space.join(item[4])

            # These fields are optional, depending on the environment.
            if len(item) >= 7:
                result["client_address"] = item[5]
                result["client_name"] = item[6]

        return result

    return [parse_item(item) for item in response]


def parse_stralgo(response, **options):
    """
    Parse the response from the `STRALGO` command.
    Without modifiers the returned value is a string.
    When LEN is given the command returns the length of the result
    (i.e. an integer).
    When IDX is given the command returns a dictionary with the LCS
    length and all the ranges in both the strings, start and end
    offset for each string, where there are matches.
    When WITHMATCHLEN is given, each array representing a match will
    also have the length of the match at the beginning of the array.
    """
    if options.get("len", False):
        return int(response)
    if options.get("idx", False):
        if options.get("withmatchlen", False):
            matches = [
                [(int(match[-1]))] + list(map(tuple, match[:-1]))
                for match in response[1]
            ]
        else:
            matches = [list(map(tuple, match)) for match in response[1]]
        return {
            str_if_bytes(response[0]): matches,
            str_if_bytes(response[2]): int(response[3]),
        }
    return str_if_bytes(response)


def parse_cluster_info(response, **options):
    response = str_if_bytes(response)
    return dict(line.split(":") for line in response.splitlines() if line)


def _parse_node_line(line):
    line_items = line.split(" ")
    node_id, addr, flags, master_id, ping, pong, epoch, connected = line.split(" ")[:8]
    ip = addr.split("@")[0]
    hostname = addr.split("@")[1].split(",")[1] if "@" in addr and "," in addr else ""
    node_dict = {
        "node_id": node_id,
        "hostname": hostname,
        "flags": flags,
        "master_id": master_id,
        "last_ping_sent": ping,
        "last_pong_rcvd": pong,
        "epoch": epoch,
        "slots": [],
        "migrations": [],
        "connected": True if connected == "connected" else False,
    }
    if len(line_items) >= 9:
        slots, migrations = _parse_slots(line_items[8:])
        node_dict["slots"], node_dict["migrations"] = slots, migrations
    return ip, node_dict


def _parse_slots(slot_ranges):
    slots, migrations = [], []
    for s_range in slot_ranges:
        if "->-" in s_range:
            slot_id, dst_node_id = s_range[1:-1].split("->-", 1)
            migrations.append(
                {"slot": slot_id, "node_id": dst_node_id, "state": "migrating"}
            )
        elif "-<-" in s_range:
            slot_id, src_node_id = s_range[1:-1].split("-<-", 1)
            migrations.append(
                {"slot": slot_id, "node_id": src_node_id, "state": "importing"}
            )
        else:
            s_range = [sl for sl in s_range.split("-")]
            slots.append(s_range)

    return slots, migrations
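Sketch with sample CLUSTER NODES slot fields: plain ranges land in slots, and bracketed entries describe in-flight migrations.

from redis._parsers.helpers import _parse_slots

slots, migrations = _parse_slots(["0-5460", "[1234->-abcd]"])
assert slots == [["0", "5460"]]
assert migrations == [{"slot": "1234", "node_id": "abcd", "state": "migrating"}]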


def parse_cluster_nodes(response, **options):
    """
    @see: https://redis.io/commands/cluster-nodes  # string / bytes
    @see: https://redis.io/commands/cluster-replicas  # list of string / bytes
    """
    if isinstance(response, (str, bytes)):
        response = response.splitlines()
    return dict(_parse_node_line(str_if_bytes(node)) for node in response)


def parse_geosearch_generic(response, **options):
    """
    Parse the response of 'GEOSEARCH', 'GEORADIUS' and 'GEORADIUSBYMEMBER'
    commands according to 'withdist', 'withhash' and 'withcoord' labels.
    """
    try:
        if options["store"] or options["store_dist"]:
            # `store` and `store_dist` can't be combined
            # with other command arguments.
            # relevant to 'GEORADIUS' and 'GEORADIUSBYMEMBER'
            return response
    except KeyError:  # it means the command was sent via execute_command
        return response

    if not isinstance(response, list):
        response_list = [response]
    else:
        response_list = response

    if not options["withdist"] and not options["withcoord"] and not options["withhash"]:
        # just a bunch of places
        return response_list

    cast = {
        "withdist": float,
        "withcoord": lambda ll: (float(ll[0]), float(ll[1])),
        "withhash": int,
    }

    # zip all output results with each casting function to get
    # the properly native Python value.
    f = [lambda x: x]
    f += [cast[o] for o in ["withdist", "withhash", "withcoord"] if options[o]]
    return [list(map(lambda fv: fv[0](fv[1]), zip(f, r))) for r in response_list]


def parse_command(response, **options):
    commands = {}
    for command in response:
        cmd_dict = {}
        cmd_name = str_if_bytes(command[0])
        cmd_dict["name"] = cmd_name
        cmd_dict["arity"] = int(command[1])
        cmd_dict["flags"] = [str_if_bytes(flag) for flag in command[2]]
        cmd_dict["first_key_pos"] = command[3]
        cmd_dict["last_key_pos"] = command[4]
        cmd_dict["step_count"] = command[5]
        if len(command) > 7:
            cmd_dict["tips"] = command[7]
            cmd_dict["key_specifications"] = command[8]
            cmd_dict["subcommands"] = command[9]
        commands[cmd_name] = cmd_dict
    return commands


def parse_command_resp3(response, **options):
    commands = {}
    for command in response:
        cmd_dict = {}
        cmd_name = str_if_bytes(command[0])
        cmd_dict["name"] = cmd_name
        cmd_dict["arity"] = command[1]
        cmd_dict["flags"] = {str_if_bytes(flag) for flag in command[2]}
        cmd_dict["first_key_pos"] = command[3]
        cmd_dict["last_key_pos"] = command[4]
        cmd_dict["step_count"] = command[5]
        cmd_dict["acl_categories"] = command[6]
        if len(command) > 7:
            cmd_dict["tips"] = command[7]
            cmd_dict["key_specifications"] = command[8]
            cmd_dict["subcommands"] = command[9]

        commands[cmd_name] = cmd_dict
    return commands


def parse_pubsub_numsub(response, **options):
    return list(zip(response[0::2], response[1::2]))


def parse_client_kill(response, **options):
    if isinstance(response, int):
        return response
    return str_if_bytes(response) == "OK"


def parse_acl_getuser(response, **options):
    if response is None:
        return None
    if isinstance(response, list):
        data = pairs_to_dict(response, decode_keys=True)
    else:
        data = {str_if_bytes(key): value for key, value in response.items()}

    # convert everything but user-defined data in 'keys' to native strings
    data["flags"] = list(map(str_if_bytes, data["flags"]))
    data["passwords"] = list(map(str_if_bytes, data["passwords"]))
    data["commands"] = str_if_bytes(data["commands"])
    if isinstance(data["keys"], str) or isinstance(data["keys"], bytes):
        data["keys"] = list(str_if_bytes(data["keys"]).split(" "))
    if data["keys"] == [""]:
        data["keys"] = []
    if "channels" in data:
        if isinstance(data["channels"], str) or isinstance(data["channels"], bytes):
            data["channels"] = list(str_if_bytes(data["channels"]).split(" "))
        if data["channels"] == [""]:
            data["channels"] = []
    if "selectors" in data:
        if data["selectors"] != [] and isinstance(data["selectors"][0], list):
            data["selectors"] = [
                list(map(str_if_bytes, selector)) for selector in data["selectors"]
            ]
        elif data["selectors"] != []:
            data["selectors"] = [
                {str_if_bytes(k): str_if_bytes(v) for k, v in selector.items()}
                for selector in data["selectors"]
            ]

    # split 'commands' into separate 'categories' and 'commands' lists
    commands, categories = [], []
    for command in data["commands"].split(" "):
        categories.append(command) if "@" in command else commands.append(command)

    data["commands"] = commands
    data["categories"] = categories
    data["enabled"] = "on" in data["flags"]
    return data


def parse_acl_log(response, **options):
    if response is None:
        return None
    if isinstance(response, list):
        data = []
        for log in response:
            log_data = pairs_to_dict(log, True, True)
            client_info = log_data.get("client-info", "")
            log_data["client-info"] = parse_client_info(client_info)

            # float() is lossy compared to the "double" in C
            log_data["age-seconds"] = float(log_data["age-seconds"])
            data.append(log_data)
    else:
        data = bool_ok(response)
    return data


def parse_client_info(value):
    """
    Parse client-info from the ACL Log, which has the following format:
    "key1=value1 key2=value2 key3=value3"
    """
    client_info = {}
    for info in str_if_bytes(value).strip().split():
        key, value = info.split("=")
        client_info[key] = value

    # These fields are defined as int in networking.c
    for int_key in {
        "id",
        "age",
        "idle",
        "db",
        "sub",
        "psub",
        "multi",
        "qbuf",
        "qbuf-free",
        "obl",
        "argv-mem",
        "oll",
        "omem",
        "tot-mem",
    }:
        if int_key in client_info:
            client_info[int_key] = int(client_info[int_key])
    return client_info


def parse_set_result(response, **options):
    """
    Handle the SET result, since the GET argument has been available
    since Redis 6.2.
    Parsing SET result into:
    - BOOL
    - String when GET argument is used
    """
    if options.get("get"):
        # Redis will return a getCommand result.
        # See `setGenericCommand` in t_string.c
        return response
    return response and str_if_bytes(response) == "OK"


def string_keys_to_dict(key_string, callback):
    return dict.fromkeys(key_string.split(), callback)
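string_keys_to_dict simply fans one callback out over many command names; this is how the _RedisCallbacks tables below are assembled. Sketch:

from redis._parsers.helpers import string_keys_to_dict

table = string_keys_to_dict("LPUSH RPUSH", int)
assert table == {"LPUSH": int, "RPUSH": int}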


_RedisCallbacks = {
    **string_keys_to_dict(
        "AUTH COPY EXPIRE EXPIREAT HEXISTS HMSET MOVE MSETNX PERSIST PSETEX "
        "PEXPIRE PEXPIREAT RENAMENX SETEX SETNX SMOVE",
        bool,
    ),
    **string_keys_to_dict("HINCRBYFLOAT INCRBYFLOAT", float),
    **string_keys_to_dict(
        "ASKING FLUSHALL FLUSHDB LSET LTRIM MSET PFMERGE READONLY READWRITE "
        "RENAME SAVE SELECT SHUTDOWN SLAVEOF SWAPDB WATCH UNWATCH",
        bool_ok,
    ),
    **string_keys_to_dict("XREAD XREADGROUP", parse_xread),
    **string_keys_to_dict(
        "GEORADIUS GEORADIUSBYMEMBER GEOSEARCH",
        parse_geosearch_generic,
    ),
    **string_keys_to_dict("XRANGE XREVRANGE", parse_stream_list),
    "ACL GETUSER": parse_acl_getuser,
    "ACL LOAD": bool_ok,
    "ACL LOG": parse_acl_log,
    "ACL SETUSER": bool_ok,
    "ACL SAVE": bool_ok,
    "CLIENT INFO": parse_client_info,
    "CLIENT KILL": parse_client_kill,
    "CLIENT LIST": parse_client_list,
    "CLIENT PAUSE": bool_ok,
    "CLIENT SETINFO": bool_ok,
    "CLIENT SETNAME": bool_ok,
    "CLIENT UNBLOCK": bool,
    "CLUSTER ADDSLOTS": bool_ok,
    "CLUSTER ADDSLOTSRANGE": bool_ok,
    "CLUSTER DELSLOTS": bool_ok,
    "CLUSTER DELSLOTSRANGE": bool_ok,
    "CLUSTER FAILOVER": bool_ok,
    "CLUSTER FORGET": bool_ok,
    "CLUSTER INFO": parse_cluster_info,
    "CLUSTER MEET": bool_ok,
    "CLUSTER NODES": parse_cluster_nodes,
    "CLUSTER REPLICAS": parse_cluster_nodes,
    "CLUSTER REPLICATE": bool_ok,
    "CLUSTER RESET": bool_ok,
    "CLUSTER SAVECONFIG": bool_ok,
    "CLUSTER SET-CONFIG-EPOCH": bool_ok,
    "CLUSTER SETSLOT": bool_ok,
    "CLUSTER SLAVES": parse_cluster_nodes,
    "COMMAND": parse_command,
    "CONFIG RESETSTAT": bool_ok,
    "CONFIG SET": bool_ok,
    "FUNCTION DELETE": bool_ok,
    "FUNCTION FLUSH": bool_ok,
    "FUNCTION RESTORE": bool_ok,
    "GEODIST": float_or_none,
    "HSCAN": parse_hscan,
    "INFO": parse_info,
    "LASTSAVE": timestamp_to_datetime,
    "MEMORY PURGE": bool_ok,
    "MODULE LOAD": bool,
    "MODULE UNLOAD": bool,
    "PING": lambda r: str_if_bytes(r) == "PONG",
    "PUBSUB NUMSUB": parse_pubsub_numsub,
    "PUBSUB SHARDNUMSUB": parse_pubsub_numsub,
    "QUIT": bool_ok,
    "SET": parse_set_result,
    "SCAN": parse_scan,
    "SCRIPT EXISTS": lambda r: list(map(bool, r)),
    "SCRIPT FLUSH": bool_ok,
    "SCRIPT KILL": bool_ok,
    "SCRIPT LOAD": str_if_bytes,
    "SENTINEL CKQUORUM": bool_ok,
    "SENTINEL FAILOVER": bool_ok,
    "SENTINEL FLUSHCONFIG": bool_ok,
    "SENTINEL GET-MASTER-ADDR-BY-NAME": parse_sentinel_get_master,
    "SENTINEL MONITOR": bool_ok,
    "SENTINEL RESET": bool_ok,
    "SENTINEL REMOVE": bool_ok,
    "SENTINEL SET": bool_ok,
    "SLOWLOG GET": parse_slowlog_get,
    "SLOWLOG RESET": bool_ok,
    "SORT": sort_return_tuples,
    "SSCAN": parse_scan,
    "TIME": lambda x: (int(x[0]), int(x[1])),
    "XAUTOCLAIM": parse_xautoclaim,
    "XCLAIM": parse_xclaim,
    "XGROUP CREATE": bool_ok,
    "XGROUP DESTROY": bool,
    "XGROUP SETID": bool_ok,
    "XINFO STREAM": parse_xinfo_stream,
    "XPENDING": parse_xpending,
    "ZSCAN": parse_zscan,
}


_RedisCallbacksRESP2 = {
    **string_keys_to_dict(
        "SDIFF SINTER SMEMBERS SUNION", lambda r: r and set(r) or set()
    ),
    **string_keys_to_dict(
        "ZDIFF ZINTER ZPOPMAX ZPOPMIN ZRANGE ZRANGEBYSCORE ZRANK ZREVRANGE "
        "ZREVRANGEBYSCORE ZREVRANK ZUNION",
        zset_score_pairs,
    ),
    **string_keys_to_dict("ZINCRBY ZSCORE", float_or_none),
    **string_keys_to_dict("BGREWRITEAOF BGSAVE", lambda r: True),
    **string_keys_to_dict("BLPOP BRPOP", lambda r: r and tuple(r) or None),
    **string_keys_to_dict(
        "BZPOPMAX BZPOPMIN", lambda r: r and (r[0], r[1], float(r[2])) or None
    ),
    "ACL CAT": lambda r: list(map(str_if_bytes, r)),
    "ACL GENPASS": str_if_bytes,
    "ACL HELP": lambda r: list(map(str_if_bytes, r)),
    "ACL LIST": lambda r: list(map(str_if_bytes, r)),
    "ACL USERS": lambda r: list(map(str_if_bytes, r)),
    "ACL WHOAMI": str_if_bytes,
    "CLIENT GETNAME": str_if_bytes,
    "CLIENT TRACKINGINFO": lambda r: list(map(str_if_bytes, r)),
    "CLUSTER GETKEYSINSLOT": lambda r: list(map(str_if_bytes, r)),
    "COMMAND GETKEYS": lambda r: list(map(str_if_bytes, r)),
    "CONFIG GET": parse_config_get,
    "DEBUG OBJECT": parse_debug_object,
    "GEOHASH": lambda r: list(map(str_if_bytes, r)),
    "GEOPOS": lambda r: list(
        map(lambda ll: (float(ll[0]), float(ll[1])) if ll is not None else None, r)
    ),
    "HGETALL": lambda r: r and pairs_to_dict(r) or {},
    "MEMORY STATS": parse_memory_stats,
    "MODULE LIST": lambda r: [pairs_to_dict(m) for m in r],
    "RESET": str_if_bytes,
    "SENTINEL MASTER": parse_sentinel_master,
    "SENTINEL MASTERS": parse_sentinel_masters,
    "SENTINEL SENTINELS": parse_sentinel_slaves_and_sentinels,
    "SENTINEL SLAVES": parse_sentinel_slaves_and_sentinels,
    "STRALGO": parse_stralgo,
    "XINFO CONSUMERS": parse_list_of_dicts,
    "XINFO GROUPS": parse_list_of_dicts,
    "ZADD": parse_zadd,
    "ZMSCORE": parse_zmscore,
}


_RedisCallbacksRESP3 = {
    **string_keys_to_dict(
        "SDIFF SINTER SMEMBERS SUNION", lambda r: r and set(r) or set()
    ),
    **string_keys_to_dict(
        "ZRANGE ZINTER ZPOPMAX ZPOPMIN ZRANGEBYSCORE ZREVRANGE ZREVRANGEBYSCORE "
        "ZUNION HGETALL XREADGROUP",
        lambda r, **kwargs: r,
    ),
    **string_keys_to_dict("XREAD XREADGROUP", parse_xread_resp3),
    "ACL LOG": lambda r: (
        [
            {str_if_bytes(key): str_if_bytes(value) for key, value in x.items()}
            for x in r
        ]
        if isinstance(r, list)
        else bool_ok(r)
    ),
    "COMMAND": parse_command_resp3,
    "CONFIG GET": lambda r: {
        str_if_bytes(key) if key is not None else None: (
            str_if_bytes(value) if value is not None else None
        )
        for key, value in r.items()
    },
    "MEMORY STATS": lambda r: {str_if_bytes(key): value for key, value in r.items()},
    "SENTINEL MASTER": parse_sentinel_state_resp3,
    "SENTINEL MASTERS": parse_sentinel_masters_resp3,
    "SENTINEL SENTINELS": parse_sentinel_slaves_and_sentinels_resp3,
    "SENTINEL SLAVES": parse_sentinel_slaves_and_sentinels_resp3,
    "STRALGO": lambda r, **options: (
        {str_if_bytes(key): str_if_bytes(value) for key, value in r.items()}
        if isinstance(r, dict)
        else str_if_bytes(r)
    ),
    "XINFO CONSUMERS": lambda r: [
        {str_if_bytes(key): value for key, value in x.items()} for x in r
    ],
    "XINFO GROUPS": lambda r: [
        {str_if_bytes(key): value for key, value in d.items()} for d in r
    ],
}
@@ -0,0 +1,295 @@
import asyncio
import socket
import sys
from logging import getLogger
from typing import Callable, List, Optional, TypedDict, Union

if sys.version_info.major >= 3 and sys.version_info.minor >= 11:
    from asyncio import timeout as async_timeout
else:
    from async_timeout import timeout as async_timeout

from ..exceptions import ConnectionError, InvalidResponse, RedisError
from ..typing import EncodableT
from ..utils import HIREDIS_AVAILABLE
from .base import (
    AsyncBaseParser,
    AsyncPushNotificationsParser,
    BaseParser,
    PushNotificationsParser,
)
from .socket import (
    NONBLOCKING_EXCEPTION_ERROR_NUMBERS,
    NONBLOCKING_EXCEPTIONS,
    SENTINEL,
    SERVER_CLOSED_CONNECTION_ERROR,
)

# Used to signal that hiredis-py does not have enough data to parse.
# Using `False` or `None` is not reliable, given that the parser can
# return `False` or `None` for legitimate reasons from RESP payloads.
NOT_ENOUGH_DATA = object()
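The sentinel-object pattern above in miniature (plain Python): a fresh object() can never collide with a value parsed off the wire, unlike None or False.

NOT_DONE = object()

def try_parse(buf: bytes):
    # stand-in parser: only a complete CRLF-terminated line is a result
    return buf[:-2] if buf.endswith(b"\r\n") else NOT_DONE

assert try_parse(b"+OK\r\n") == b"+OK"
assert try_parse(b"+OK") is NOT_DONE   # incomplete: caller should read more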
|
||||
|
||||
|
||||
class _HiredisReaderArgs(TypedDict, total=False):
|
||||
protocolError: Callable[[str], Exception]
|
||||
replyError: Callable[[str], Exception]
|
||||
encoding: Optional[str]
|
||||
errors: Optional[str]
|
||||
|
||||
|
||||
class _HiredisParser(BaseParser, PushNotificationsParser):
|
||||
"Parser class for connections using Hiredis"
|
||||
|
||||
def __init__(self, socket_read_size):
|
||||
if not HIREDIS_AVAILABLE:
|
||||
raise RedisError("Hiredis is not installed")
|
||||
self.socket_read_size = socket_read_size
|
||||
self._buffer = bytearray(socket_read_size)
|
||||
self.pubsub_push_handler_func = self.handle_pubsub_push_response
|
||||
self.invalidation_push_handler_func = None
|
||||
self._hiredis_PushNotificationType = None
|
||||
|
||||
def __del__(self):
|
||||
try:
|
||||
self.on_disconnect()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def handle_pubsub_push_response(self, response):
|
||||
logger = getLogger("push_response")
|
||||
logger.debug("Push response: " + str(response))
|
||||
return response
|
||||
|
||||
    def on_connect(self, connection, **kwargs):
        import hiredis

        self._sock = connection._sock
        self._socket_timeout = connection.socket_timeout
        kwargs = {
            "protocolError": InvalidResponse,
            "replyError": self.parse_error,
            "errors": connection.encoder.encoding_errors,
            "notEnoughData": NOT_ENOUGH_DATA,
        }

        if connection.encoder.decode_responses:
            kwargs["encoding"] = connection.encoder.encoding
        self._reader = hiredis.Reader(**kwargs)
        self._next_response = NOT_ENOUGH_DATA

        try:
            self._hiredis_PushNotificationType = hiredis.PushNotification
        except AttributeError:
            # hiredis < 3.2
            self._hiredis_PushNotificationType = None

    def on_disconnect(self):
        self._sock = None
        self._reader = None
        self._next_response = NOT_ENOUGH_DATA

    def can_read(self, timeout):
        if not self._reader:
            raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)

        if self._next_response is NOT_ENOUGH_DATA:
            self._next_response = self._reader.gets()
            if self._next_response is NOT_ENOUGH_DATA:
                return self.read_from_socket(timeout=timeout, raise_on_timeout=False)
        return True

    def read_from_socket(self, timeout=SENTINEL, raise_on_timeout=True):
        sock = self._sock
        custom_timeout = timeout is not SENTINEL
        try:
            if custom_timeout:
                sock.settimeout(timeout)
            bufflen = self._sock.recv_into(self._buffer)
            if bufflen == 0:
                raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
            self._reader.feed(self._buffer, 0, bufflen)
            # data was read from the socket and added to the buffer.
            # return True to indicate that data was read.
            return True
        except socket.timeout:
            if raise_on_timeout:
                raise TimeoutError("Timeout reading from socket")
            return False
        except NONBLOCKING_EXCEPTIONS as ex:
            # if we're in nonblocking mode and the recv raises a
            # blocking error, simply return False indicating that
            # there's no data to be read. otherwise raise the
            # original exception.
            allowed = NONBLOCKING_EXCEPTION_ERROR_NUMBERS.get(ex.__class__, -1)
            if not raise_on_timeout and ex.errno == allowed:
                return False
            raise ConnectionError(f"Error while reading from socket: {ex.args}")
        finally:
            if custom_timeout:
                sock.settimeout(self._socket_timeout)

    def read_response(self, disable_decoding=False, push_request=False):
        if not self._reader:
            raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)

        # _next_response might be cached from a can_read() call
        if self._next_response is not NOT_ENOUGH_DATA:
            response = self._next_response
            self._next_response = NOT_ENOUGH_DATA
            if self._hiredis_PushNotificationType is not None and isinstance(
                response, self._hiredis_PushNotificationType
            ):
                response = self.handle_push_response(response)
                if not push_request:
                    return self.read_response(
                        disable_decoding=disable_decoding, push_request=push_request
                    )
                else:
                    return response
            return response

        if disable_decoding:
            response = self._reader.gets(False)
        else:
            response = self._reader.gets()

        while response is NOT_ENOUGH_DATA:
            self.read_from_socket()
            if disable_decoding:
                response = self._reader.gets(False)
            else:
                response = self._reader.gets()
        # if the response is a ConnectionError or the response is a list and
        # the first item is a ConnectionError, raise it as something bad
        # happened
        if isinstance(response, ConnectionError):
            raise response
        elif self._hiredis_PushNotificationType is not None and isinstance(
            response, self._hiredis_PushNotificationType
        ):
            response = self.handle_push_response(response)
            if not push_request:
                return self.read_response(
                    disable_decoding=disable_decoding, push_request=push_request
                )
            else:
                return response
        elif (
            isinstance(response, list)
            and response
            and isinstance(response[0], ConnectionError)
        ):
            raise response[0]
        return response

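The feed/gets cycle that read_from_socket() and read_response() drive above can
be seen in isolation with hiredis itself. A hedged sketch; it assumes a hiredis
version that accepts the notEnoughData keyword, which matches what on_connect()
passes above:

# --- editor's illustration, not part of the diff ---
import hiredis

sentinel = object()
reader = hiredis.Reader(notEnoughData=sentinel)
reader.feed(b"*2\r\n$5\r\nhello\r\n$5")   # an incomplete two-element array
assert reader.gets() is sentinel          # no complete reply buffered yet
reader.feed(b"\r\nworld\r\n")             # complete the frame
assert reader.gets() == [b"hello", b"world"]
# --- end illustration ---
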
class _AsyncHiredisParser(AsyncBaseParser, AsyncPushNotificationsParser):
    """Async implementation of parser class for connections using Hiredis"""

    __slots__ = ("_reader",)

    def __init__(self, socket_read_size: int):
        if not HIREDIS_AVAILABLE:
            raise RedisError("Hiredis is not available.")
        super().__init__(socket_read_size=socket_read_size)
        self._reader = None
        self.pubsub_push_handler_func = self.handle_pubsub_push_response
        self.invalidation_push_handler_func = None
        self._hiredis_PushNotificationType = None

    async def handle_pubsub_push_response(self, response):
        logger = getLogger("push_response")
        logger.debug("Push response: %s", response)
        return response

    def on_connect(self, connection):
        import hiredis

        self._stream = connection._reader
        kwargs: _HiredisReaderArgs = {
            "protocolError": InvalidResponse,
            "replyError": self.parse_error,
            "notEnoughData": NOT_ENOUGH_DATA,
        }
        if connection.encoder.decode_responses:
            kwargs["encoding"] = connection.encoder.encoding
            kwargs["errors"] = connection.encoder.encoding_errors

        self._reader = hiredis.Reader(**kwargs)
        self._connected = True

        # hiredis < 3.2 does not expose PushNotification; getattr()
        # falls back to None in that case, so no try/except is needed
        self._hiredis_PushNotificationType = getattr(
            hiredis, "PushNotification", None
        )

    def on_disconnect(self):
        self._connected = False

    async def can_read_destructive(self):
        if not self._connected:
            raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
        if self._reader.gets() is not NOT_ENOUGH_DATA:
            return True
        try:
            async with async_timeout(0):
                return await self.read_from_socket()
        except asyncio.TimeoutError:
            return False

    async def read_from_socket(self):
        buffer = await self._stream.read(self._read_size)
        if not buffer or not isinstance(buffer, bytes):
            raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) from None
        self._reader.feed(buffer)
        # data was read from the socket and added to the buffer.
        # return True to indicate that data was read.
        return True

    async def read_response(
        self, disable_decoding: bool = False, push_request: bool = False
    ) -> Union[EncodableT, List[EncodableT]]:
        # If `on_disconnect()` has been called, prohibit any more reads
        # even if they could happen because data might be present.
        # We still allow reads in progress to finish.
        if not self._connected:
            raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) from None

        if disable_decoding:
            response = self._reader.gets(False)
        else:
            response = self._reader.gets()

        while response is NOT_ENOUGH_DATA:
            await self.read_from_socket()
            if disable_decoding:
                response = self._reader.gets(False)
            else:
                response = self._reader.gets()

        # if the response is a ConnectionError or the response is a list and
        # the first item is a ConnectionError, raise it as something bad
        # happened
        if isinstance(response, ConnectionError):
            raise response
        elif self._hiredis_PushNotificationType is not None and isinstance(
            response, self._hiredis_PushNotificationType
        ):
            response = await self.handle_push_response(response)
            if not push_request:
                return await self.read_response(
                    disable_decoding=disable_decoding, push_request=push_request
                )
            else:
                return response
        elif (
            isinstance(response, list)
            and response
            and isinstance(response[0], ConnectionError)
        ):
            raise response[0]
        return response
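
can_read_destructive() above relies on a zero-second timeout to turn "no data
waiting" into an exception rather than an indefinite wait. A standalone sketch of
that pattern (probe() is illustrative; on Python < 3.11 async_timeout fills the
same role, as in the imports above):

# --- editor's illustration, not part of the diff ---
import asyncio

async def probe(reader: asyncio.StreamReader) -> bool:
    # destructive, like the parser method: a successful probe consumes a byte
    try:
        async with asyncio.timeout(0):  # Python 3.11+
            return bool(await reader.read(1))
    except asyncio.TimeoutError:
        return False
# --- end illustration ---
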
@@ -0,0 +1,132 @@
from typing import Any, Union

from ..exceptions import ConnectionError, InvalidResponse, ResponseError
from ..typing import EncodableT
from .base import _AsyncRESPBase, _RESPBase
from .socket import SERVER_CLOSED_CONNECTION_ERROR

class _RESP2Parser(_RESPBase):
    """RESP2 protocol implementation"""

    def read_response(self, disable_decoding=False):
        pos = self._buffer.get_pos() if self._buffer else None
        try:
            result = self._read_response(disable_decoding=disable_decoding)
        except BaseException:
            if self._buffer:
                self._buffer.rewind(pos)
            raise
        else:
            self._buffer.purge()
            return result

    def _read_response(self, disable_decoding=False):
        raw = self._buffer.readline()
        if not raw:
            raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)

        byte, response = raw[:1], raw[1:]

        # server returned an error
        if byte == b"-":
            response = response.decode("utf-8", errors="replace")
            error = self.parse_error(response)
            # if the error is a ConnectionError, raise immediately so the user
            # is notified
            if isinstance(error, ConnectionError):
                raise error
            # otherwise, we're dealing with a ResponseError that might belong
            # inside a pipeline response. the connection's read_response()
            # and/or the pipeline's execute() will raise this error if
            # necessary, so just return the exception instance here.
            return error
        # single value
        elif byte == b"+":
            pass
        # int value
        elif byte == b":":
            return int(response)
        # bulk response
        elif byte == b"$" and response == b"-1":
            return None
        elif byte == b"$":
            response = self._buffer.read(int(response))
        # multi-bulk response
        elif byte == b"*" and response == b"-1":
            return None
        elif byte == b"*":
            response = [
                self._read_response(disable_decoding=disable_decoding)
                for _ in range(int(response))
            ]
        else:
            raise InvalidResponse(f"Protocol Error: {raw!r}")

        if disable_decoding is False:
            response = self.encoder.decode(response)
        return response
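
A worked, self-contained example of the RESP2 framing handled above, stripped of
error, null, and socket handling. It also shows why bulk reads fetch the declared
length plus two bytes: the trailing CRLF is consumed but never returned:

# --- editor's illustration, not part of the diff ---
import io

def parse_resp2(stream: io.BytesIO):
    line = stream.readline()[:-2]          # strip the trailing \r\n
    byte, rest = line[:1], line[1:]
    if byte == b":":                       # integer
        return int(rest)
    if byte == b"$":                       # bulk string: payload + \r\n
        return stream.read(int(rest) + 2)[:-2]
    if byte == b"*":                       # array of nested replies
        return [parse_resp2(stream) for _ in range(int(rest))]
    raise ValueError(f"unsupported type byte: {byte!r}")

assert parse_resp2(io.BytesIO(b"*2\r\n$3\r\nfoo\r\n:42\r\n")) == [b"foo", 42]
# --- end illustration ---
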
class _AsyncRESP2Parser(_AsyncRESPBase):
    """Async class for the RESP2 protocol"""

    async def read_response(self, disable_decoding: bool = False):
        if not self._connected:
            raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
        if self._chunks:
            # augment parsing buffer with previously read data
            self._buffer += b"".join(self._chunks)
            self._chunks.clear()
        self._pos = 0
        response = await self._read_response(disable_decoding=disable_decoding)
        # Successfully parsing a response allows us to clear our parsing buffer
        self._clear()
        return response

    async def _read_response(
        self, disable_decoding: bool = False
    ) -> Union[EncodableT, ResponseError, None]:
        raw = await self._readline()
        response: Any
        byte, response = raw[:1], raw[1:]

        # server returned an error
        if byte == b"-":
            response = response.decode("utf-8", errors="replace")
            error = self.parse_error(response)
            # if the error is a ConnectionError, raise immediately so the user
            # is notified
            if isinstance(error, ConnectionError):
                self._clear()  # Successful parse
                raise error
            # otherwise, we're dealing with a ResponseError that might belong
            # inside a pipeline response. the connection's read_response()
            # and/or the pipeline's execute() will raise this error if
            # necessary, so just return the exception instance here.
            return error
        # single value
        elif byte == b"+":
            pass
        # int value
        elif byte == b":":
            return int(response)
        # bulk response
        elif byte == b"$" and response == b"-1":
            return None
        elif byte == b"$":
            response = await self._read(int(response))
        # multi-bulk response
        elif byte == b"*" and response == b"-1":
            return None
        elif byte == b"*":
            response = [
                (await self._read_response(disable_decoding))
                for _ in range(int(response))  # noqa
            ]
        else:
            raise InvalidResponse(f"Protocol Error: {raw!r}")

        if disable_decoding is False:
            response = self.encoder.decode(response)
        return response
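
One subtlety above deserves a sketch: the error branch returns a ResponseError
instead of raising it, so a pipeline can surface failures positionally. A minimal
illustration using only the exception class this module already imports:

# --- editor's illustration, not part of the diff ---
from redis.exceptions import ResponseError

def collect(replies):
    # pipeline-style handling: errors travel as ordinary values until the
    # caller (e.g. execute()) decides whether to raise them
    return [f"failed: {r}" if isinstance(r, ResponseError) else r for r in replies]

assert collect([b"OK", ResponseError("wrong type"), 42]) == [
    b"OK",
    "failed: wrong type",
    42,
]
# --- end illustration ---
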
@@ -0,0 +1,257 @@
from logging import getLogger
from typing import Any, Union

from ..exceptions import ConnectionError, InvalidResponse, ResponseError
from ..typing import EncodableT
from .base import (
    AsyncPushNotificationsParser,
    PushNotificationsParser,
    _AsyncRESPBase,
    _RESPBase,
)
from .socket import SERVER_CLOSED_CONNECTION_ERROR

class _RESP3Parser(_RESPBase, PushNotificationsParser):
    """RESP3 protocol implementation"""

    def __init__(self, socket_read_size):
        super().__init__(socket_read_size)
        self.pubsub_push_handler_func = self.handle_pubsub_push_response
        self.invalidation_push_handler_func = None

    def handle_pubsub_push_response(self, response):
        logger = getLogger("push_response")
        logger.debug("Push response: %s", response)
        return response

    def read_response(self, disable_decoding=False, push_request=False):
        pos = self._buffer.get_pos() if self._buffer else None
        try:
            result = self._read_response(
                disable_decoding=disable_decoding, push_request=push_request
            )
        except BaseException:
            if self._buffer:
                self._buffer.rewind(pos)
            raise
        else:
            self._buffer.purge()
            return result

    def _read_response(self, disable_decoding=False, push_request=False):
        raw = self._buffer.readline()
        if not raw:
            raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)

        byte, response = raw[:1], raw[1:]

        # server returned an error
        if byte in (b"-", b"!"):
            if byte == b"!":
                response = self._buffer.read(int(response))
            response = response.decode("utf-8", errors="replace")
            error = self.parse_error(response)
            # if the error is a ConnectionError, raise immediately so the user
            # is notified
            if isinstance(error, ConnectionError):
                raise error
            # otherwise, we're dealing with a ResponseError that might belong
            # inside a pipeline response. the connection's read_response()
            # and/or the pipeline's execute() will raise this error if
            # necessary, so just return the exception instance here.
            return error
        # single value
        elif byte == b"+":
            pass
        # null value
        elif byte == b"_":
            return None
        # int and big int values
        elif byte in (b":", b"("):
            return int(response)
        # double value
        elif byte == b",":
            return float(response)
        # bool value
        elif byte == b"#":
            return response == b"t"
        # bulk response
        elif byte == b"$":
            response = self._buffer.read(int(response))
        # verbatim string response
        elif byte == b"=":
            response = self._buffer.read(int(response))[4:]
        # array response
        elif byte == b"*":
            response = [
                self._read_response(disable_decoding=disable_decoding)
                for _ in range(int(response))
            ]
        # set response
        elif byte == b"~":
            # redis can return unhashable types (like dict) in a set,
            # so we return sets as lists, all the time, for predictability
            response = [
                self._read_response(disable_decoding=disable_decoding)
                for _ in range(int(response))
            ]
        # map response
        elif byte == b"%":
            # We cannot use a dict comprehension to parse the stream:
            # the evaluation order of the key:value expression in a dict
            # comprehension only became defined (left to right) in Python 3.8.
            resp_dict = {}
            for _ in range(int(response)):
                key = self._read_response(disable_decoding=disable_decoding)
                resp_dict[key] = self._read_response(
                    disable_decoding=disable_decoding, push_request=push_request
                )
            response = resp_dict
        # push response
        elif byte == b">":
            response = [
                self._read_response(
                    disable_decoding=disable_decoding, push_request=push_request
                )
                for _ in range(int(response))
            ]
            response = self.handle_push_response(response)
            if not push_request:
                return self._read_response(
                    disable_decoding=disable_decoding, push_request=push_request
                )
            else:
                return response
        else:
            raise InvalidResponse(f"Protocol Error: {raw!r}")

        if isinstance(response, bytes) and disable_decoding is False:
            response = self.encoder.decode(response)
        return response
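
The verbatim-string branch (b"=") hides the least obvious arithmetic above: the
declared length covers a three-character format tag plus a colon (e.g. "txt:"),
which is why the parser slices off the first four bytes. A spot check:

# --- editor's illustration, not part of the diff ---
# For "=15\r\ntxt:Some string\r\n" the declared length (15) includes the prefix.
payload = b"txt:Some string"
assert len(payload) == 15
assert payload[4:] == b"Some string"
# --- end illustration ---
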
class _AsyncRESP3Parser(_AsyncRESPBase, AsyncPushNotificationsParser):
    def __init__(self, socket_read_size):
        super().__init__(socket_read_size)
        self.pubsub_push_handler_func = self.handle_pubsub_push_response
        self.invalidation_push_handler_func = None

    async def handle_pubsub_push_response(self, response):
        logger = getLogger("push_response")
        logger.debug("Push response: %s", response)
        return response

    async def read_response(
        self, disable_decoding: bool = False, push_request: bool = False
    ):
        if self._chunks:
            # augment parsing buffer with previously read data
            self._buffer += b"".join(self._chunks)
            self._chunks.clear()
        self._pos = 0
        response = await self._read_response(
            disable_decoding=disable_decoding, push_request=push_request
        )
        # Successfully parsing a response allows us to clear our parsing buffer
        self._clear()
        return response

    async def _read_response(
        self, disable_decoding: bool = False, push_request: bool = False
    ) -> Union[EncodableT, ResponseError, None]:
        if not self._stream or not self.encoder:
            raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
        raw = await self._readline()
        response: Any
        byte, response = raw[:1], raw[1:]

        # server returned an error
        if byte in (b"-", b"!"):
            if byte == b"!":
                response = await self._read(int(response))
            response = response.decode("utf-8", errors="replace")
            error = self.parse_error(response)
            # if the error is a ConnectionError, raise immediately so the user
            # is notified
            if isinstance(error, ConnectionError):
                self._clear()  # Successful parse
                raise error
            # otherwise, we're dealing with a ResponseError that might belong
            # inside a pipeline response. the connection's read_response()
            # and/or the pipeline's execute() will raise this error if
            # necessary, so just return the exception instance here.
            return error
        # single value
        elif byte == b"+":
            pass
        # null value
        elif byte == b"_":
            return None
        # int and big int values
        elif byte in (b":", b"("):
            return int(response)
        # double value
        elif byte == b",":
            return float(response)
        # bool value
        elif byte == b"#":
            return response == b"t"
        # bulk response
        elif byte == b"$":
            response = await self._read(int(response))
        # verbatim string response
        elif byte == b"=":
            response = (await self._read(int(response)))[4:]
        # array response
        elif byte == b"*":
            response = [
                (await self._read_response(disable_decoding=disable_decoding))
                for _ in range(int(response))
            ]
        # set response
        elif byte == b"~":
            # redis can return unhashable types (like dict) in a set,
            # so we always convert to a list, to have predictable return types
            response = [
                (await self._read_response(disable_decoding=disable_decoding))
                for _ in range(int(response))
            ]
        # map response
        elif byte == b"%":
            # We cannot use a dict comprehension to parse the stream:
            # the evaluation order of the key:value expression in a dict
            # comprehension only became defined (left to right) in Python 3.8.
            resp_dict = {}
            for _ in range(int(response)):
                key = await self._read_response(disable_decoding=disable_decoding)
                resp_dict[key] = await self._read_response(
                    disable_decoding=disable_decoding, push_request=push_request
                )
            response = resp_dict
        # push response
        elif byte == b">":
            response = [
                (
                    await self._read_response(
                        disable_decoding=disable_decoding, push_request=push_request
                    )
                )
                for _ in range(int(response))
            ]
            response = await self.handle_push_response(response)
            if not push_request:
                return await self._read_response(
                    disable_decoding=disable_decoding, push_request=push_request
                )
            else:
                return response
        else:
            raise InvalidResponse(f"Protocol Error: {raw!r}")

        if isinstance(response, bytes) and disable_decoding is False:
            response = self.encoder.decode(response)
        return response
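
For reference, the rough shape of a push message the b">" branch produces and
hands to handle_push_response(); the exact payload is an assumption based on
pub/sub conventions, not taken from this diff:

# --- editor's illustration, not part of the diff ---
def handler(response):
    # trivial stand-in for pubsub_push_handler_func
    return response

push = [b"message", b"channel-1", b"payload"]  # hypothetical push frame contents
kind, channel, data = handler(push)
assert (kind, channel, data) == (b"message", b"channel-1", b"payload")
# --- end illustration ---
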
@@ -0,0 +1,162 @@
import errno
import io
import socket
from io import SEEK_END
from typing import Optional, Union

from ..exceptions import ConnectionError, TimeoutError
from ..utils import SSL_AVAILABLE

NONBLOCKING_EXCEPTION_ERROR_NUMBERS = {BlockingIOError: errno.EWOULDBLOCK}

if SSL_AVAILABLE:
    import ssl

    if hasattr(ssl, "SSLWantReadError"):
        NONBLOCKING_EXCEPTION_ERROR_NUMBERS[ssl.SSLWantReadError] = 2
        NONBLOCKING_EXCEPTION_ERROR_NUMBERS[ssl.SSLWantWriteError] = 2
    else:
        NONBLOCKING_EXCEPTION_ERROR_NUMBERS[ssl.SSLError] = 2

NONBLOCKING_EXCEPTIONS = tuple(NONBLOCKING_EXCEPTION_ERROR_NUMBERS.keys())

SERVER_CLOSED_CONNECTION_ERROR = "Connection closed by server."
SENTINEL = object()

SYM_CRLF = b"\r\n"
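
The errno mapping above exists because a non-blocking recv() with nothing to read
raises instead of returning. A small demonstration using a local socketpair; note
that on most platforms EWOULDBLOCK and EAGAIN share a value, which is why the
assertion checks both:

# --- editor's illustration, not part of the diff ---
import errno
import socket

a, b = socket.socketpair()
a.setblocking(False)
try:
    a.recv(1)                      # nothing has been sent yet
except BlockingIOError as ex:
    assert ex.errno in (errno.EWOULDBLOCK, errno.EAGAIN)
finally:
    a.close()
    b.close()
# --- end illustration ---
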
class SocketBuffer:
    def __init__(
        self, socket: socket.socket, socket_read_size: int, socket_timeout: float
    ):
        self._sock = socket
        self.socket_read_size = socket_read_size
        self.socket_timeout = socket_timeout
        self._buffer = io.BytesIO()

    def unread_bytes(self) -> int:
        """
        Remaining unread length of buffer
        """
        pos = self._buffer.tell()
        end = self._buffer.seek(0, SEEK_END)
        self._buffer.seek(pos)
        return end - pos

    def _read_from_socket(
        self,
        length: Optional[int] = None,
        timeout: Union[float, object] = SENTINEL,
        raise_on_timeout: Optional[bool] = True,
    ) -> bool:
        sock = self._sock
        socket_read_size = self.socket_read_size
        marker = 0
        custom_timeout = timeout is not SENTINEL

        buf = self._buffer
        current_pos = buf.tell()
        buf.seek(0, SEEK_END)
        if custom_timeout:
            sock.settimeout(timeout)
        try:
            while True:
                data = self._sock.recv(socket_read_size)
                # an empty bytestring indicates the server shut down the socket
                if isinstance(data, bytes) and len(data) == 0:
                    raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR)
                buf.write(data)
                data_length = len(data)
                marker += data_length

                if length is not None and length > marker:
                    continue
                return True
        except socket.timeout:
            if raise_on_timeout:
                raise TimeoutError("Timeout reading from socket")
            return False
        except NONBLOCKING_EXCEPTIONS as ex:
            # if we're in nonblocking mode and the recv raises a
            # blocking error, simply return False indicating that
            # there's no data to be read. otherwise raise the
            # original exception.
            allowed = NONBLOCKING_EXCEPTION_ERROR_NUMBERS.get(ex.__class__, -1)
            if not raise_on_timeout and ex.errno == allowed:
                return False
            raise ConnectionError(f"Error while reading from socket: {ex.args}")
        finally:
            buf.seek(current_pos)
            if custom_timeout:
                sock.settimeout(self.socket_timeout)

    def can_read(self, timeout: float) -> bool:
        return bool(self.unread_bytes()) or self._read_from_socket(
            timeout=timeout, raise_on_timeout=False
        )

    def read(self, length: int) -> bytes:
        length = length + 2  # make sure to read the \r\n terminator
        # BytesIO will return less than requested if the buffer is short
        data = self._buffer.read(length)
        missing = length - len(data)
        if missing:
            # fill up the buffer and read the remainder
            self._read_from_socket(missing)
            data += self._buffer.read(missing)
        return data[:-2]

    def readline(self) -> bytes:
        buf = self._buffer
        data = buf.readline()
        while not data.endswith(SYM_CRLF):
            # there's more data in the socket that we need
            self._read_from_socket()
            data += buf.readline()

        return data[:-2]

    def get_pos(self) -> int:
        """
        Get current read position
        """
        return self._buffer.tell()

    def rewind(self, pos: int) -> None:
        """
        Rewind the buffer to a specific position, to re-start reading
        """
        self._buffer.seek(pos)

    def purge(self) -> None:
        """
        After a successful read, purge the read part of buffer
        """
        unread = self.unread_bytes()

        # Only if we have read all of the buffer do we truncate, to
        # reduce the amount of memory thrashing. This heuristic
        # can be changed or removed later.
        if unread > 0:
            return

        # nothing is unread at this point, so drop the stored bytes
        self._buffer.truncate(0)
        self._buffer.seek(0)

    def close(self) -> None:
        try:
            self._buffer.close()
        except Exception:
            # issue #633 suggests the purge/close somehow raised a
            # BadFileDescriptor error. Perhaps the client ran out of
            # memory or something else? It's probably OK to ignore
            # any error being raised from purge/close since we're
            # removing the reference to the instance below.
            pass
        self._buffer = None
        self._sock = None
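
Finally, the tell/seek arithmetic behind unread_bytes() and the truncation done
by purge(), shown on a bare BytesIO:

# --- editor's illustration, not part of the diff ---
import io
from io import SEEK_END

buf = io.BytesIO(b"+OK\r\n+OK\r\n")
buf.read(5)                    # consume the first reply
pos = buf.tell()               # 5
end = buf.seek(0, SEEK_END)    # 10; seek() returns the new position
buf.seek(pos)                  # restore the read cursor
assert end - pos == 5          # one reply still unread

buf.read()                     # consume the rest
buf.truncate(0)                # purge: drop the stored bytes...
buf.seek(0)                    # ...and rewind for the next cycle
assert buf.getvalue() == b""
# --- end illustration ---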