diff --git a/valkey/__init__.py b/valkey/__init__.py index e4202fbe..0b10bdac 100644 --- a/valkey/__init__.py +++ b/valkey/__init__.py @@ -44,6 +44,9 @@ def int_or_str(value): return value +__version__: str +VERSION: "tuple[int | str, ...]" + try: __version__ = metadata.version("valkey") except metadata.PackageNotFoundError: diff --git a/valkey/asyncio/__init__.pyi b/valkey/asyncio/__init__.pyi new file mode 100644 index 00000000..7d45bb0f --- /dev/null +++ b/valkey/asyncio/__init__.pyi @@ -0,0 +1,64 @@ +from valkey.asyncio.client import Valkey as Valkey, StrictValkey as StrictValkey +from valkey.asyncio.cluster import ValkeyCluster as ValkeyCluster +from valkey.asyncio.connection import ( + BlockingConnectionPool as BlockingConnectionPool, + Connection as Connection, + ConnectionPool as ConnectionPool, + SSLConnection as SSLConnection, + UnixDomainSocketConnection as UnixDomainSocketConnection, +) +from valkey.asyncio.parser import CommandsParser as CommandsParser +from valkey.asyncio.sentinel import ( + Sentinel as Sentinel, + SentinelConnectionPool as SentinelConnectionPool, + SentinelManagedConnection as SentinelManagedConnection, + SentinelManagedSSLConnection as SentinelManagedSSLConnection, +) +from valkey.asyncio.utils import from_url as from_url +from valkey.backoff import default_backoff as default_backoff +from valkey.exceptions import ( + AuthenticationError as AuthenticationError, + AuthenticationWrongNumberOfArgsError as AuthenticationWrongNumberOfArgsError, + BusyLoadingError as BusyLoadingError, + ChildDeadlockedError as ChildDeadlockedError, + ConnectionError as ConnectionError, + DataError as DataError, + InvalidResponse as InvalidResponse, + PubSubError as PubSubError, + ReadOnlyError as ReadOnlyError, + ValkeyError as ValkeyError, + ResponseError as ResponseError, + TimeoutError as TimeoutError, + WatchError as WatchError, +) + +__all__ = [ + "AuthenticationError", + "AuthenticationWrongNumberOfArgsError", + "BlockingConnectionPool", + 
"BusyLoadingError", + "ChildDeadlockedError", + "CommandsParser", + "Connection", + "ConnectionError", + "ConnectionPool", + "DataError", + "from_url", + "default_backoff", + "InvalidResponse", + "PubSubError", + "ReadOnlyError", + "Valkey", + "ValkeyCluster", + "ValkeyError", + "ResponseError", + "Sentinel", + "SentinelConnectionPool", + "SentinelManagedConnection", + "SentinelManagedSSLConnection", + "SSLConnection", + "StrictValkey", + "TimeoutError", + "UnixDomainSocketConnection", + "WatchError", +] diff --git a/valkey/asyncio/client.pyi b/valkey/asyncio/client.pyi new file mode 100644 index 00000000..a33d7cec --- /dev/null +++ b/valkey/asyncio/client.pyi @@ -0,0 +1,1091 @@ +from _typeshed import Incomplete, Unused +from collections.abc import AsyncIterator, Awaitable, Callable, Generator, Iterable, Mapping, MutableMapping, Sequence +from datetime import datetime, timedelta +from types import TracebackType +from typing import Any, ClassVar, Literal, NoReturn, Protocol, TypedDict, overload +from typing_extensions import Self, TypeAlias + +from valkey import ValkeyError +from valkey.asyncio.connection import ConnectCallbackT, Connection, ConnectionPool +from valkey.asyncio.lock import Lock +from valkey.asyncio.retry import Retry +from valkey.client import AbstractValkey, _CommandOptions, _Key, _StrType, _Value +from valkey.commands import AsyncCoreCommands, AsyncSentinelCommands, ValkeyModuleCommands +from valkey.credentials import CredentialProvider +from valkey.typing import ChannelT, EncodableT, KeyT, PatternT, StreamIdT + +PubSubHandler: TypeAlias = Callable[[dict[str, str]], Awaitable[None]] + +class ResponseCallbackProtocol(Protocol): + def __call__(self, response: Any, **kwargs): ... + +class AsyncResponseCallbackProtocol(Protocol): + async def __call__(self, response: Any, **kwargs): ... 
+ +ResponseCallbackT: TypeAlias = ResponseCallbackProtocol | AsyncResponseCallbackProtocol + +class Valkey(AbstractValkey, ValkeyModuleCommands, AsyncCoreCommands[_StrType], AsyncSentinelCommands): + response_callbacks: MutableMapping[str | bytes, ResponseCallbackT] + auto_close_connection_pool: bool + connection_pool: Any + single_connection_client: Any + connection: Any + @overload + @classmethod + def from_url( + cls, + url: str, + *, + host: str = "localhost", + port: int = 6379, + db: str | int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + socket_keepalive: bool | None = None, + socket_keepalive_options: Mapping[int, int | bytes] | None = None, + connection_pool: ConnectionPool[Any] | None = None, + unix_socket_path: str | None = None, + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: Literal[True], + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | None = None, + ssl: bool = False, + ssl_keyfile: str | None = None, + ssl_certfile: str | None = None, + ssl_cert_reqs: str = "required", + ssl_ca_certs: str | None = None, + ssl_ca_data: str | None = None, + ssl_check_hostname: bool = False, + max_connections: int | None = None, + single_connection_client: bool = False, + health_check_interval: int = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + auto_close_connection_pool: bool = True, + valkey_connect_func: ConnectCallbackT | None = None, + credential_provider: CredentialProvider | None = None, + ) -> Valkey[str]: ... 
+ @overload + @classmethod + def from_url( + cls, + url: str, + *, + host: str = "localhost", + port: int = 6379, + db: str | int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + socket_keepalive: bool | None = None, + socket_keepalive_options: Mapping[int, int | bytes] | None = None, + connection_pool: ConnectionPool[Any] | None = None, + unix_socket_path: str | None = None, + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: Literal[False] = False, + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | None = None, + ssl: bool = False, + ssl_keyfile: str | None = None, + ssl_certfile: str | None = None, + ssl_cert_reqs: str = "required", + ssl_ca_certs: str | None = None, + ssl_ca_data: str | None = None, + ssl_check_hostname: bool = False, + max_connections: int | None = None, + single_connection_client: bool = False, + health_check_interval: int = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + auto_close_connection_pool: bool = True, + valkey_connect_func: ConnectCallbackT | None = None, + credential_provider: CredentialProvider | None = None, + ) -> Valkey[bytes]: ... 
+ @overload + def __init__( + self: Valkey[str], + *, + host: str = "localhost", + port: int = 6379, + db: str | int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + socket_keepalive: bool | None = None, + socket_keepalive_options: Mapping[int, int | bytes] | None = None, + connection_pool: ConnectionPool[Any] | None = None, + unix_socket_path: str | None = None, + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: Literal[True], + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | None = None, + ssl: bool = False, + ssl_keyfile: str | None = None, + ssl_certfile: str | None = None, + ssl_cert_reqs: str = "required", + ssl_ca_certs: str | None = None, + ssl_ca_data: str | None = None, + ssl_check_hostname: bool = False, + max_connections: int | None = None, + single_connection_client: bool = False, + health_check_interval: int = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + auto_close_connection_pool: bool = True, + valkey_connect_func: ConnectCallbackT | None = None, + credential_provider: CredentialProvider | None = None, + ) -> None: ... 
+ @overload + def __init__( + self: Valkey[bytes], + *, + host: str = "localhost", + port: int = 6379, + db: str | int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + socket_keepalive: bool | None = None, + socket_keepalive_options: Mapping[int, int | bytes] | None = None, + connection_pool: ConnectionPool[Any] | None = None, + unix_socket_path: str | None = None, + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: Literal[False] = False, + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | None = None, + ssl: bool = False, + ssl_keyfile: str | None = None, + ssl_certfile: str | None = None, + ssl_cert_reqs: str = "required", + ssl_ca_certs: str | None = None, + ssl_ca_data: str | None = None, + ssl_check_hostname: bool = False, + max_connections: int | None = None, + single_connection_client: bool = False, + health_check_interval: int = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + auto_close_connection_pool: bool = True, + valkey_connect_func: ConnectCallbackT | None = None, + credential_provider: CredentialProvider | None = None, + ) -> None: ... + def __await__(self) -> Generator[Any, None, Self]: ... + async def initialize(self) -> Self: ... + def set_response_callback(self, command: str, callback: ResponseCallbackT): ... + def load_external_module(self, funcname, func) -> None: ... + def pipeline(self, transaction: bool = True, shard_hint: str | None = None) -> Pipeline[_StrType]: ... + async def transaction( + self, + func: Callable[[Pipeline[_StrType]], Any | Awaitable[Any]], + *watches: KeyT, + shard_hint: str | None = None, + value_from_callable: bool = False, + watch_delay: float | None = None, + ): ... 
+ def lock( + self, + name: KeyT, + timeout: float | None = None, + sleep: float = 0.1, + blocking: bool = True, + blocking_timeout: float | None = None, + lock_class: type[Lock] | None = None, + thread_local: bool = True, + ) -> Lock: ... + def pubsub(self, **kwargs) -> PubSub: ... + def monitor(self) -> Monitor: ... + def client(self) -> Valkey[_StrType]: ... + async def __aenter__(self) -> Self: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __del__(self, _warnings: Any = ...) -> None: ... + async def close(self, close_connection_pool: bool | None = None) -> None: ... + async def execute_command(self, *args, **options): ... + async def parse_response(self, connection: Connection, command_name: str | bytes, **options): ... + +StrictValkey = Valkey + +class MonitorCommandInfo(TypedDict): + time: float + db: int + client_address: str + client_port: str + client_type: str + command: str + +class Monitor: + monitor_re: Any + command_re: Any + connection_pool: Any + connection: Any + def __init__(self, connection_pool: ConnectionPool[Any]) -> None: ... + async def connect(self) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__(self, *args: Unused) -> None: ... + async def next_command(self) -> MonitorCommandInfo: ... + def listen(self) -> AsyncIterator[MonitorCommandInfo]: ... 
+ +class PubSub: + PUBLISH_MESSAGE_TYPES: ClassVar[tuple[str, ...]] + UNSUBSCRIBE_MESSAGE_TYPES: ClassVar[tuple[str, ...]] + HEALTH_CHECK_MESSAGE: ClassVar[str] + connection_pool: Any + shard_hint: str | None + ignore_subscribe_messages: bool + connection: Any + encoder: Any + health_check_response: Iterable[str | bytes] + channels: Any + pending_unsubscribe_channels: Any + patterns: Any + pending_unsubscribe_patterns: Any + def __init__( + self, + connection_pool: ConnectionPool[Any], + shard_hint: str | None = None, + ignore_subscribe_messages: bool = False, + encoder: Incomplete | None = None, + ) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __del__(self) -> None: ... + async def reset(self) -> None: ... + def close(self) -> Awaitable[NoReturn]: ... + async def on_connect(self, connection: Connection): ... + @property + def subscribed(self) -> bool: ... + async def execute_command(self, *args: EncodableT): ... + async def parse_response(self, block: bool = True, timeout: float = 0): ... + async def check_health(self) -> None: ... + async def psubscribe(self, *args: ChannelT, **kwargs: PubSubHandler): ... + def punsubscribe(self, *args: ChannelT) -> Awaitable[Any]: ... + async def subscribe(self, *args: ChannelT, **kwargs: Callable[..., Any]): ... + def unsubscribe(self, *args) -> Awaitable[Any]: ... + def listen(self) -> AsyncIterator[Any]: ... + async def get_message(self, ignore_subscribe_messages: bool = False, timeout: float = 0.0): ... + def ping(self, message: Incomplete | None = None) -> Awaitable[Any]: ... + async def handle_message(self, response, ignore_subscribe_messages: bool = False): ... + async def run(self, *, exception_handler: PSWorkerThreadExcHandlerT | None = None, poll_timeout: float = 1.0) -> None: ... 
+ +class PubsubWorkerExceptionHandler(Protocol): + def __call__(self, e: BaseException, pubsub: PubSub): ... + +class AsyncPubsubWorkerExceptionHandler(Protocol): + async def __call__(self, e: BaseException, pubsub: PubSub): ... + +PSWorkerThreadExcHandlerT: TypeAlias = PubsubWorkerExceptionHandler | AsyncPubsubWorkerExceptionHandler +CommandT: TypeAlias = tuple[tuple[str | bytes, ...], Mapping[str, Any]] +CommandStackT: TypeAlias = list[CommandT] + +class Pipeline(Valkey[_StrType]): + UNWATCH_COMMANDS: ClassVar[set[str]] + connection_pool: Any + connection: Any + response_callbacks: Any + is_transaction: bool + shard_hint: str | None + watching: bool + command_stack: Any + scripts: Any + explicit_transaction: bool + def __init__( + self, + connection_pool: ConnectionPool[Any], + response_callbacks: MutableMapping[str | bytes, ResponseCallbackT], + transaction: bool, + shard_hint: str | None, + ) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __await__(self) -> Generator[Any, None, Self]: ... + def __len__(self) -> int: ... + def __bool__(self) -> bool: ... + async def reset(self) -> None: ... + def multi(self) -> None: ... + def execute_command(self, *args, **kwargs) -> Pipeline[_StrType] | Awaitable[Pipeline[_StrType]]: ... + async def immediate_execute_command(self, *args, **options): ... + def pipeline_execute_command(self, *args, **options): ... + def raise_first_error(self, commands: CommandStackT, response: Iterable[Any]): ... + def annotate_exception(self, exception: Exception, number: int, command: Iterable[object]) -> None: ... + async def parse_response(self, connection: Connection, command_name: str | bytes, **options): ... + async def load_scripts(self) -> None: ... + async def execute(self, raise_on_error: bool = True): ... + async def discard(self) -> None: ... 
+ async def watch(self, *names: KeyT) -> bool: ... + async def unwatch(self) -> bool: ... + # region acl commands + def acl_cat(self, category: str | None = None, **kwargs: _CommandOptions) -> Any: ... + def acl_deluser(self, *username: str, **kwargs: _CommandOptions) -> Any: ... + def acl_genpass(self, bits: int | None = None, **kwargs: _CommandOptions) -> Any: ... + def acl_getuser(self, username: str, **kwargs: _CommandOptions) -> Any: ... + def acl_help(self, **kwargs: _CommandOptions) -> Any: ... + def acl_list(self, **kwargs: _CommandOptions) -> Any: ... + def acl_log(self, count: int | None = None, **kwargs: _CommandOptions) -> Any: ... + def acl_log_reset(self, **kwargs: _CommandOptions) -> Any: ... + def acl_load(self, **kwargs: _CommandOptions) -> Any: ... + def acl_save(self, **kwargs: _CommandOptions) -> Any: ... + def acl_setuser( # type: ignore[override] + self, + username: str, + enabled: bool = False, + nopass: bool = False, + passwords: Sequence[str] | None = None, + hashed_passwords: Sequence[str] | None = None, + categories: Sequence[str] | None = None, + commands: Sequence[str] | None = None, + keys: Sequence[str] | None = None, + channels: Iterable[ChannelT] | None = None, + selectors: Iterable[tuple[str, KeyT]] | None = None, + reset: bool = False, + reset_keys: bool = False, + reset_channels: bool = False, + reset_passwords: bool = False, + **kwargs: _CommandOptions, + ) -> Pipeline[_StrType]: ... + def acl_users(self, **kwargs: _CommandOptions) -> Any: ... + def acl_whoami(self, **kwargs: _CommandOptions) -> Any: ... + # endregion + # region cluster commands + def cluster(self, cluster_arg: str, *args, **kwargs: _CommandOptions) -> Any: ... + def readwrite(self, **kwargs: _CommandOptions) -> Any: ... + def readonly(self, **kwargs: _CommandOptions) -> Any: ... + # endregion + # region BasicKey commands + def append(self, key, value) -> Any: ... 
+ def bitcount(self, key: _Key, start: int | None = None, end: int | None = None, mode: str | None = None) -> Any: ... + def bitfield(self, key, default_overflow: Incomplete | None = None) -> Any: ... + def bitop(self, operation, dest, *keys) -> Any: ... + def bitpos(self, key: _Key, bit: int, start: int | None = None, end: int | None = None, mode: str | None = None) -> Any: ... + def copy(self, source, destination, destination_db: Incomplete | None = None, replace: bool = False) -> Any: ... + def decr(self, name, amount: int = 1) -> Any: ... + def decrby(self, name, amount: int = 1) -> Any: ... + def delete(self, *names: _Key) -> Any: ... + def dump(self, name: _Key) -> Any: ... + def exists(self, *names: _Key) -> Any: ... + def expire( + self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False + ) -> Any: ... + def expireat(self, name, when, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False) -> Any: ... + def get(self, name: _Key) -> Any: ... + def getdel(self, name: _Key) -> Any: ... + def getex( + self, + name, + ex: Incomplete | None = None, + px: Incomplete | None = None, + exat: Incomplete | None = None, + pxat: Incomplete | None = None, + persist: bool = False, + ) -> Any: ... + def getbit(self, name: _Key, offset: int) -> Any: ... + def getrange(self, key, start, end) -> Any: ... + def getset(self, name, value) -> Any: ... + def incr(self, name: _Key, amount: int = 1) -> Any: ... + def incrby(self, name: _Key, amount: int = 1) -> Any: ... + def incrbyfloat(self, name: _Key, amount: float = 1.0) -> Any: ... + def keys(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> Any: ... + def lmove( + self, + first_list: _Key, + second_list: _Key, + src: Literal["LEFT", "RIGHT"] = "LEFT", + dest: Literal["LEFT", "RIGHT"] = "RIGHT", + ) -> Any: ... 
+ def blmove( + self, + first_list: _Key, + second_list: _Key, + timeout: float, + src: Literal["LEFT", "RIGHT"] = "LEFT", + dest: Literal["LEFT", "RIGHT"] = "RIGHT", + ) -> Any: ... + def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... + def mset(self, mapping: Mapping[_Key, _Value]) -> Any: ... + def msetnx(self, mapping: Mapping[_Key, _Value]) -> Any: ... + def move(self, name: _Key, db: int) -> Any: ... + def persist(self, name: _Key) -> Any: ... + def pexpire( + self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False + ) -> Any: ... + def pexpireat( + self, name: _Key, when: int | datetime, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False + ) -> Any: ... + def psetex(self, name, time_ms, value) -> Any: ... + def pttl(self, name: _Key) -> Any: ... + def hrandfield(self, key, count: Incomplete | None = None, withvalues: bool = False) -> Any: ... + def randomkey(self, **kwargs: _CommandOptions) -> Any: ... + def rename(self, src, dst) -> Any: ... + def renamenx(self, src, dst) -> Any: ... + def restore( + self, + name, + ttl, + value, + replace: bool = False, + absttl: bool = False, + idletime: Incomplete | None = None, + frequency: Incomplete | None = None, + ) -> Any: ... + def set( # type: ignore[override] + self, + name: _Key, + value: _Value, + ex: None | int | timedelta = None, + px: None | int | timedelta = None, + nx: bool = False, + xx: bool = False, + keepttl: bool = False, + get: bool = False, + exat: Incomplete | None = None, + pxat: Incomplete | None = None, + ) -> Any: ... + def setbit(self, name: _Key, offset: int, value: int) -> Any: ... + def setex(self, name: _Key, time: int | timedelta, value: _Value) -> Any: ... + def setnx(self, name: _Key, value: _Value) -> Any: ... + def setrange(self, name, offset, value) -> Any: ... 
+ def stralgo( + self, + algo, + value1, + value2, + specific_argument: str = "strings", + len: bool = False, + idx: bool = False, + minmatchlen: Incomplete | None = None, + withmatchlen: bool = False, + **kwargs: _CommandOptions, + ) -> Any: ... + def strlen(self, name) -> Any: ... + def substr(self, name, start, end: int = -1) -> Any: ... + def touch(self, *args) -> Any: ... + def ttl(self, name: _Key) -> Any: ... + def type(self, name) -> Any: ... + def unlink(self, *names: _Key) -> Any: ... + # endregion + # region hyperlog commands + def pfadd(self, name: _Key, *values: _Value) -> Any: ... + def pfcount(self, name: _Key) -> Any: ... + def pfmerge(self, dest: _Key, *sources: _Key) -> Any: ... + # endregion + # region hash commands + def hdel(self, name: _Key, *keys: _Key) -> Any: ... + def hexists(self, name: _Key, key: _Key) -> Any: ... + def hget(self, name: _Key, key: _Key) -> Any: ... + def hgetall(self, name: _Key) -> Any: ... + def hincrby(self, name: _Key, key: _Key, amount: int = 1) -> Any: ... + def hincrbyfloat(self, name: _Key, key: _Key, amount: float = 1.0) -> Any: ... + def hkeys(self, name: _Key) -> Any: ... + def hlen(self, name: _Key) -> Any: ... + @overload + def hset( + self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = None, items: Incomplete | None = None + ) -> Any: ... + @overload + def hset( + self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value], items: Incomplete | None = None + ) -> Any: ... + @overload + def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value], items: Incomplete | None = None) -> Any: ... + def hsetnx(self, name: _Key, key: _Key, value: _Value) -> Any: ... + def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> Any: ... + def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... + def hvals(self, name: _Key) -> Any: ... + def hstrlen(self, name, key) -> Any: ... 
+ # endregion + # region geo commands + def geoadd(self, name, values, nx: bool = False, xx: bool = False, ch: bool = False) -> Any: ... + def geodist(self, name, place1, place2, unit: Incomplete | None = None) -> Any: ... + def geohash(self, name, *values) -> Any: ... + def geopos(self, name, *values) -> Any: ... + def georadius( + self, + name, + longitude, + latitude, + radius, + unit: Incomplete | None = None, + withdist: bool = False, + withcoord: bool = False, + withhash: bool = False, + count: Incomplete | None = None, + sort: Incomplete | None = None, + store: Incomplete | None = None, + store_dist: Incomplete | None = None, + any: bool = False, + ) -> Any: ... + def georadiusbymember( + self, + name, + member, + radius, + unit: Incomplete | None = None, + withdist: bool = False, + withcoord: bool = False, + withhash: bool = False, + count: Incomplete | None = None, + sort: Incomplete | None = None, + store: Incomplete | None = None, + store_dist: Incomplete | None = None, + any: bool = False, + ) -> Any: ... + def geosearch( + self, + name, + member: Incomplete | None = None, + longitude: Incomplete | None = None, + latitude: Incomplete | None = None, + unit: str = "m", + radius: Incomplete | None = None, + width: Incomplete | None = None, + height: Incomplete | None = None, + sort: Incomplete | None = None, + count: Incomplete | None = None, + any: bool = False, + withcoord: bool = False, + withdist: bool = False, + withhash: bool = False, + ) -> Any: ... + def geosearchstore( + self, + dest, + name, + member: Incomplete | None = None, + longitude: Incomplete | None = None, + latitude: Incomplete | None = None, + unit: str = "m", + radius: Incomplete | None = None, + width: Incomplete | None = None, + height: Incomplete | None = None, + sort: Incomplete | None = None, + count: Incomplete | None = None, + any: bool = False, + storedist: bool = False, + ) -> Any: ... 
+ # endregion + # region list commands + @overload + def blpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> Any: ... + @overload + def blpop(self, keys: _Value | Iterable[_Value], timeout: float) -> Any: ... + @overload + def brpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> Any: ... + @overload + def brpop(self, keys: _Value | Iterable[_Value], timeout: float) -> Any: ... + def brpoplpush(self, src, dst, timeout: int | None = 0) -> Any: ... + def lindex(self, name: _Key, index: int) -> Any: ... + def linsert( + self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value + ) -> Any: ... + def llen(self, name: _Key) -> Any: ... + def lpop(self, name, count: int | None = None) -> Any: ... + def lpush(self, name: _Value, *values: _Value) -> Any: ... + def lpushx(self, name, value) -> Any: ... + def lrange(self, name: _Key, start: int, end: int) -> Any: ... + def lrem(self, name: _Key, count: int, value: _Value) -> Any: ... + def lset(self, name: _Key, index: int, value: _Value) -> Any: ... + def ltrim(self, name: _Key, start: int, end: int) -> Any: ... + def rpop(self, name, count: int | None = None) -> Any: ... + def rpoplpush(self, src, dst) -> Any: ... + def rpush(self, name: _Value, *values: _Value) -> Any: ... + def rpushx(self, name, value) -> Any: ... + def lpos( + self, name, value, rank: Incomplete | None = None, count: Incomplete | None = None, maxlen: Incomplete | None = None + ) -> Any: ... + @overload # type: ignore[override] + def sort( + self, + name: _Key, + start: int | None = None, + num: int | None = None, + by: _Key | None = None, + get: _Key | Sequence[_Key] | None = None, + desc: bool = False, + alpha: bool = False, + store: None = None, + groups: bool = False, + ) -> list[_StrType]: ... 
+ @overload + def sort( + self, + name: _Key, + start: int | None = None, + num: int | None = None, + by: _Key | None = None, + get: _Key | Sequence[_Key] | None = None, + desc: bool = False, + alpha: bool = False, + *, + store: _Key, + groups: bool = False, + ) -> Any: ... + @overload + def sort( + self, + name: _Key, + start: int | None, + num: int | None, + by: _Key | None, + get: _Key | Sequence[_Key] | None, + desc: bool, + alpha: bool, + store: _Key, + groups: bool = False, + ) -> Any: ... + # endregion + # region scan commands + def scan( + self, + cursor: int = 0, + match: _Key | None = None, + count: int | None = None, + _type: str | None = None, + **kwargs: _CommandOptions, + ) -> Any: ... + def sscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None) -> Any: ... + def hscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None) -> Any: ... + @overload + def zscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None) -> Any: ... + @overload + def zscan( + self, + name: _Key, + cursor: int = 0, + match: _Key | None = None, + count: int | None = None, + *, + score_cast_func: Callable[[_StrType], Any], + ) -> Any: ... + @overload + def zscan( + self, name: _Key, cursor: int, match: _Key | None, count: int | None, score_cast_func: Callable[[_StrType], Any] + ) -> Any: ... + # endregion + # region set commands + def sadd(self, name: _Key, *values: _Value) -> Any: ... + def scard(self, name: _Key) -> Any: ... + def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... + def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... + def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... + def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... + def sismember(self, name: _Key, value: _Value) -> Any: ... + def smembers(self, name: _Key) -> Any: ... 
+ def smismember(self, name, values, *args) -> Any: ... + def smove(self, src: _Key, dst: _Key, value: _Value) -> Any: ... + @overload + def spop(self, name: _Key, count: None = None) -> Any: ... + @overload + def spop(self, name: _Key, count: int) -> Any: ... + @overload + def srandmember(self, name: _Key, number: None = None) -> Any: ... + @overload + def srandmember(self, name: _Key, number: int) -> Any: ... + def srem(self, name: _Key, *values: _Value) -> Any: ... + def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... + def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ... + # endregion + # region stream commands + def xack(self, name, groupname, *ids) -> Any: ... + def xadd( + self, + name, + fields, + id: str | int | bytes | memoryview = "*", + maxlen=None, + approximate: bool = True, + nomkstream: bool = False, + minid: Incomplete | None = None, + limit: Incomplete | None = None, + ) -> Any: ... + def xautoclaim( + self, + name, + groupname, + consumername, + min_idle_time, + start_id: StreamIdT = "0-0", + count: Incomplete | None = None, + justid: bool = False, + ) -> Any: ... + def xclaim( + self, + name, + groupname, + consumername, + min_idle_time, + message_ids, + idle=None, + time=None, + retrycount=None, + force=False, + justid=False, + ) -> Any: ... + def xdel(self, name, *ids) -> Any: ... + def xgroup_create(self, name, groupname, id: str = "$", mkstream: bool = False, entries_read: int | None = None) -> Any: ... + def xgroup_delconsumer(self, name, groupname, consumername) -> Any: ... + def xgroup_destroy(self, name, groupname) -> Any: ... + def xgroup_createconsumer(self, name, groupname, consumername) -> Any: ... + def xgroup_setid(self, name, groupname, id, entries_read: int | None = None) -> Any: ... + def xinfo_consumers(self, name, groupname) -> Any: ... + def xinfo_groups(self, name) -> Any: ... + def xinfo_stream(self, name, full: bool = False) -> Any: ... 
+ def xlen(self, name: _Key) -> Any: ... + def xpending(self, name, groupname) -> Any: ... + def xpending_range( + self, name: _Key, groupname, min, max, count: int, consumername: Incomplete | None = None, idle: int | None = None + ) -> Any: ... + def xrange(self, name, min: str = "-", max: str = "+", count: Incomplete | None = None) -> Any: ... + def xread(self, streams, count: Incomplete | None = None, block: Incomplete | None = None) -> Any: ... + def xreadgroup( + self, + groupname, + consumername, + streams, + count: Incomplete | None = None, + block: Incomplete | None = None, + noack: bool = False, + ) -> Any: ... + def xrevrange(self, name, max: str = "+", min: str = "-", count: Incomplete | None = None) -> Any: ... + def xtrim( + self, name, maxlen: int | None = None, approximate: bool = True, minid: Incomplete | None = None, limit: int | None = None + ) -> Any: ... + # endregion + # region sorted set commands + def zadd( + self, + name: _Key, + mapping: Mapping[_Key, _Value], + nx: bool = False, + xx: bool = False, + ch: bool = False, + incr: bool = False, + gt: Incomplete | None = False, + lt: Incomplete | None = False, + ) -> Any: ... + def zcard(self, name: _Key) -> Any: ... + def zcount(self, name: _Key, min: _Value, max: _Value) -> Any: ... + def zdiff(self, keys, withscores: bool = False) -> Any: ... + def zdiffstore(self, dest, keys) -> Any: ... + def zincrby(self, name: _Key, amount: float, value: _Value) -> Any: ... + def zinter(self, keys, aggregate: Incomplete | None = None, withscores: bool = False) -> Any: ... + def zinterstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None) -> Any: ... + def zlexcount(self, name: _Key, min: _Value, max: _Value) -> Any: ... + def zpopmax(self, name: _Key, count: int | None = None) -> Any: ... + def zpopmin(self, name: _Key, count: int | None = None) -> Any: ... + def zrandmember(self, key, count: Incomplete | None = None, withscores: bool = False) -> Any: ... 
+ @overload + def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> Any: ... + @overload + def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float) -> Any: ... + @overload + def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> Any: ... + @overload + def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float) -> Any: ... + @overload # type: ignore[override] + def zrange( + self, + name: _Key, + start: int, + end: int, + desc: bool, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], Any], + byscore: bool = False, + bylex: bool = False, + offset: int | None = None, + num: int | None = None, + ) -> Any: ... + @overload + def zrange( + self, + name: _Key, + start: int, + end: int, + desc: bool, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], float] = ..., + byscore: bool = False, + bylex: bool = False, + offset: int | None = None, + num: int | None = None, + ) -> Any: ... + @overload + def zrange( + self, + name: _Key, + start: int, + end: int, + *, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], None], + byscore: bool = False, + bylex: bool = False, + offset: int | None = None, + num: int | None = None, + ) -> Any: ... + @overload + def zrange( + self, + name: _Key, + start: int, + end: int, + *, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], float] = ..., + byscore: bool = False, + bylex: bool = False, + offset: int | None = None, + num: int | None = None, + ) -> Any: ... + @overload + def zrange( + self, + name: _Key, + start: int, + end: int, + desc: bool = False, + withscores: bool = False, + score_cast_func: Callable[[_StrType], Any] = ..., + byscore: bool = False, + bylex: bool = False, + offset: int | None = None, + num: int | None = None, + ) -> Any: ... 
+ @overload # type: ignore[override] + def zrevrange( + self, name: _Key, start: int, end: int, withscores: Literal[True], score_cast_func: Callable[[_StrType], None] + ) -> Any: ... + @overload + def zrevrange(self, name: _Key, start: int, end: int, withscores: Literal[True]) -> Any: ... + @overload + def zrevrange( + self, name: _Key, start: int, end: int, withscores: bool = False, score_cast_func: Callable[[Any], Any] = ... + ) -> Any: ... + def zrangestore( + self, + dest, + name, + start, + end, + byscore: bool = False, + bylex: bool = False, + desc: bool = False, + offset: Incomplete | None = None, + num: Incomplete | None = None, + ) -> Any: ... + def zrangebylex(self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None) -> Any: ... + def zrevrangebylex(self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None) -> Any: ... + @overload # type: ignore[override] + def zrangebyscore( + self, + name: _Key, + min: _Value, + max: _Value, + start: int | None = None, + num: int | None = None, + *, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], None], + ) -> Any: ... + @overload + def zrangebyscore( + self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True] + ) -> Any: ... + @overload + def zrangebyscore( + self, + name: _Key, + min: _Value, + max: _Value, + start: int | None = None, + num: int | None = None, + withscores: bool = False, + score_cast_func: Callable[[_StrType], Any] = ..., + ) -> Any: ... + @overload + def zrevrangebyscore( + self, + name: _Key, + max: _Value, + min: _Value, + start: int | None = None, + num: int | None = None, + *, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], Any], + ) -> Any: ... + @overload + def zrevrangebyscore( + self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True] + ) -> Any: ... 
+ @overload + def zrevrangebyscore( + self, + name: _Key, + max: _Value, + min: _Value, + start: int | None = None, + num: int | None = None, + withscores: bool = False, + score_cast_func: Callable[[_StrType], Any] = ..., + ) -> Any: ... + def zrank(self, name: _Key, value: _Value, withscore: bool = False) -> Any: ... + def zrem(self, name: _Key, *values: _Value) -> Any: ... + def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> Any: ... + def zremrangebyrank(self, name: _Key, min: int, max: int) -> Any: ... + def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> Any: ... + def zrevrank(self, name: _Key, value: _Value, withscore: bool = False) -> Any: ... + def zscore(self, name: _Key, value: _Value) -> Any: ... + def zunion(self, keys, aggregate: Incomplete | None = None, withscores: bool = False) -> Any: ... + def zunionstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None) -> Any: ... + def zmscore(self, key, members) -> Any: ... + # endregion + # region management commands + def bgrewriteaof(self, **kwargs: _CommandOptions) -> Any: ... + def bgsave(self, schedule: bool = True, **kwargs: _CommandOptions) -> Any: ... + def role(self) -> Any: ... + def client_kill(self, address: str, **kwargs: _CommandOptions) -> Any: ... + def client_kill_filter( + self, + _id: Incomplete | None = None, + _type: Incomplete | None = None, + addr: Incomplete | None = None, + skipme: Incomplete | None = None, + laddr: Incomplete | None = None, + user: Incomplete | None = None, + **kwargs: _CommandOptions, + ) -> Any: ... + def client_info(self, **kwargs: _CommandOptions) -> Any: ... + def client_list(self, _type: str | None = None, client_id: list[str] = [], **kwargs: _CommandOptions) -> Any: ... + def client_getname(self, **kwargs: _CommandOptions) -> Any: ... + def client_getredir(self, **kwargs: _CommandOptions) -> Any: ... + def client_reply(self, reply, **kwargs: _CommandOptions) -> Any: ... 
+ def client_id(self, **kwargs: _CommandOptions) -> Any: ... + def client_tracking_on( + self, + clientid: Incomplete | None = None, + prefix=[], + bcast: bool = False, + optin: bool = False, + optout: bool = False, + noloop: bool = False, + ) -> Any: ... + def client_tracking_off( + self, + clientid: Incomplete | None = None, + prefix=[], + bcast: bool = False, + optin: bool = False, + optout: bool = False, + noloop: bool = False, + ) -> Any: ... + def client_tracking( + self, + on: bool = True, + clientid: Incomplete | None = None, + prefix=[], + bcast: bool = False, + optin: bool = False, + optout: bool = False, + noloop: bool = False, + **kwargs: _CommandOptions, + ) -> Any: ... + def client_trackinginfo(self, **kwargs: _CommandOptions) -> Any: ... + def client_setname(self, name: str, **kwargs: _CommandOptions) -> Any: ... + def client_unblock(self, client_id, error: bool = False, **kwargs: _CommandOptions) -> Any: ... + def client_pause(self, timeout, all: bool = True, **kwargs: _CommandOptions) -> Any: ... + def client_unpause(self, **kwargs: _CommandOptions) -> Any: ... + def command(self, **kwargs: _CommandOptions) -> Any: ... + def command_info(self, **kwargs: _CommandOptions) -> Any: ... + def command_count(self, **kwargs: _CommandOptions) -> Any: ... + def config_get(self, pattern: PatternT = "*", *args: PatternT, **kwargs: _CommandOptions) -> Any: ... + def config_set(self, name: KeyT, value: EncodableT, *args: KeyT | EncodableT, **kwargs: _CommandOptions) -> Any: ... + def config_resetstat(self, **kwargs: _CommandOptions) -> Any: ... + def config_rewrite(self, **kwargs: _CommandOptions) -> Any: ... + def dbsize(self, **kwargs: _CommandOptions) -> Any: ... + def debug_object(self, key, **kwargs: _CommandOptions) -> Any: ... + def debug_segfault(self, **kwargs: _CommandOptions) -> Any: ... + def echo(self, value: _Value, **kwargs: _CommandOptions) -> Any: ... + def flushall(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> Any: ... 
+ def flushdb(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> Any: ... + def sync(self) -> Any: ... + def psync(self, replicationid, offset) -> Any: ... + def swapdb(self, first, second, **kwargs: _CommandOptions) -> Any: ... + def select(self, index, **kwargs: _CommandOptions) -> Any: ... + def info(self, section: _Key | None = None, *args: _Key, **kwargs: _CommandOptions) -> Any: ... + def lastsave(self, **kwargs: _CommandOptions) -> Any: ... + def lolwut(self, *version_numbers: _Value, **kwargs: _CommandOptions) -> Any: ... + def migrate( + self, + host, + port, + keys, + destination_db, + timeout, + copy: bool = False, + replace: bool = False, + auth: Incomplete | None = None, + **kwargs: _CommandOptions, + ) -> Any: ... + def object(self, infotype, key, **kwargs: _CommandOptions) -> Any: ... + def memory_doctor(self, **kwargs: _CommandOptions) -> Any: ... + def memory_help(self, **kwargs: _CommandOptions) -> Any: ... + def memory_stats(self, **kwargs: _CommandOptions) -> Any: ... + def memory_malloc_stats(self, **kwargs: _CommandOptions) -> Any: ... + def memory_usage(self, key, samples: Incomplete | None = None, **kwargs: _CommandOptions) -> Any: ... + def memory_purge(self, **kwargs: _CommandOptions) -> Any: ... + def ping(self, **kwargs: _CommandOptions) -> Any: ... + def quit(self, **kwargs: _CommandOptions) -> Any: ... + def replicaof(self, *args, **kwargs: _CommandOptions) -> Any: ... + def save(self, **kwargs: _CommandOptions) -> Any: ... + def shutdown( + self, + save: bool = False, + nosave: bool = False, + now: bool = False, + force: bool = False, + abort: bool = False, + **kwargs: _CommandOptions, + ) -> Any: ... + def slaveof(self, host: Incomplete | None = None, port: Incomplete | None = None, **kwargs: _CommandOptions) -> Any: ... + def slowlog_get(self, num: Incomplete | None = None, **kwargs: _CommandOptions) -> Any: ... + def slowlog_len(self, **kwargs: _CommandOptions) -> Any: ... 
+ def slowlog_reset(self, **kwargs: _CommandOptions) -> Any: ... + def time(self, **kwargs: _CommandOptions) -> Any: ... + def wait(self, num_replicas, timeout, **kwargs: _CommandOptions) -> Any: ... + # endregion + # region module commands + def module_load(self, path, *args) -> Any: ... + def module_unload(self, name) -> Any: ... + def module_list(self) -> Any: ... + def command_getkeys(self, *args) -> Any: ... + # endregion + # region pubsub commands + def publish(self, channel: _Key, message: _Key, **kwargs: _CommandOptions) -> Any: ... + def pubsub_channels(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> Any: ... + def pubsub_numpat(self, **kwargs: _CommandOptions) -> Any: ... + def pubsub_numsub(self, *args: _Key, **kwargs: _CommandOptions) -> Any: ... + # endregion + # region script commands + def eval(self, script, numkeys, *keys_and_args) -> Any: ... + def evalsha(self, sha, numkeys, *keys_and_args) -> Any: ... + def script_exists(self, *args) -> Any: ... + def script_debug(self, *args) -> Any: ... + def script_flush(self, sync_type: Incomplete | None = None) -> Any: ... + def script_kill(self) -> Any: ... + def script_load(self, script) -> Any: ... + def register_script(self, script: str | _StrType) -> Any: ... 
# type: ignore[override] + # endregion diff --git a/valkey/asyncio/cluster.pyi b/valkey/asyncio/cluster.pyi new file mode 100644 index 00000000..257769d6 --- /dev/null +++ b/valkey/asyncio/cluster.pyi @@ -0,0 +1,229 @@ +from _typeshed import Incomplete +from collections.abc import Awaitable, Callable, Mapping +from types import TracebackType +from typing import Any, Generic, TypeVar +from typing_extensions import Self + +from valkey.asyncio.client import ResponseCallbackT +from valkey.asyncio.connection import AbstractConnection, BaseParser, Connection, Encoder +from valkey.asyncio.parser import CommandsParser +from valkey.client import AbstractValkey +from valkey.cluster import AbstractValkeyCluster, LoadBalancer + +# TODO: add AsyncValkeyClusterCommands stubs +# from valkey.commands import AsyncValkeyClusterCommands +from valkey.commands.core import _StrType +from valkey.credentials import CredentialProvider +from valkey.exceptions import ResponseError +from valkey.retry import Retry +from valkey.typing import AnyKeyT, EncodableT, KeyT + +TargetNodesT = TypeVar("TargetNodesT", str, ClusterNode, list[ClusterNode], dict[Any, ClusterNode]) # noqa: Y001 + +# It uses `DefaultParser` in real life, but it is a dynamic base class. +class ClusterParser(BaseParser): + def on_disconnect(self) -> None: ... + def on_connect(self, connection: AbstractConnection) -> None: ... + async def can_read_destructive(self) -> bool: ... + async def read_response(self, disable_decoding: bool = False) -> EncodableT | ResponseError | list[EncodableT] | None: ... 
+
+class ValkeyCluster(AbstractValkey, AbstractValkeyCluster, Generic[_StrType]): # TODO: AsyncValkeyClusterCommands
+ @classmethod
+ def from_url(
+ cls,
+ url: str,
+ *,
+ host: str | None = None,
+ port: str | int = 6379,
+ # Cluster related kwargs
+ startup_nodes: list[ClusterNode] | None = None,
+ require_full_coverage: bool = True,
+ read_from_replicas: bool = False,
+ reinitialize_steps: int = 5,
+ cluster_error_retry_attempts: int = 3,
+ connection_error_retry_attempts: int = 3,
+ max_connections: int = 2147483648,
+ # Client related kwargs
+ db: str | int = 0,
+ path: str | None = None,
+ credential_provider: CredentialProvider | None = None,
+ username: str | None = None,
+ password: str | None = None,
+ client_name: str | None = None,
+ # Encoding related kwargs
+ encoding: str = "utf-8",
+ encoding_errors: str = "strict",
+ decode_responses: bool = False,
+ # Connection related kwargs
+ health_check_interval: float = 0,
+ socket_connect_timeout: float | None = None,
+ socket_keepalive: bool = False,
+ socket_keepalive_options: Mapping[int, int | bytes] | None = None,
+ socket_timeout: float | None = None,
+ retry: Retry | None = None,
+ retry_on_error: list[type[Exception]] | None = None,
+ # SSL related kwargs
+ ssl: bool = False,
+ ssl_ca_certs: str | None = None,
+ ssl_ca_data: str | None = None,
+ ssl_cert_reqs: str = "required",
+ ssl_certfile: str | None = None,
+ ssl_check_hostname: bool = False,
+ ssl_keyfile: str | None = None,
+ address_remap: Callable[[str, int], tuple[str, int]] | None = None,
+ ) -> Self: ... 
+
+ retry: Retry | None
+ connection_kwargs: dict[str, Any]
+ nodes_manager: NodesManager
+ encoder: Encoder
+ read_from_replicas: bool
+ reinitialize_steps: int
+ cluster_error_retry_attempts: int
+ reinitialize_counter: int
+ commands_parser: CommandsParser
+ node_flags: set[str]
+ command_flags: dict[str, str]
+ response_callbacks: Incomplete
+ result_callbacks: dict[str, Callable[[Incomplete, Incomplete], Incomplete]]
+
+ def __init__(
+ self,
+ host: str | None = None,
+ port: str | int = 6379,
+ # Cluster related kwargs
+ startup_nodes: list[ClusterNode] | None = None,
+ require_full_coverage: bool = True,
+ read_from_replicas: bool = False,
+ reinitialize_steps: int = 5,
+ cluster_error_retry_attempts: int = 3,
+ connection_error_retry_attempts: int = 3,
+ max_connections: int = 2147483648,
+ # Client related kwargs
+ db: str | int = 0,
+ path: str | None = None,
+ credential_provider: CredentialProvider | None = None,
+ username: str | None = None,
+ password: str | None = None,
+ client_name: str | None = None,
+ # Encoding related kwargs
+ encoding: str = "utf-8",
+ encoding_errors: str = "strict",
+ decode_responses: bool = False,
+ # Connection related kwargs
+ health_check_interval: float = 0,
+ socket_connect_timeout: float | None = None,
+ socket_keepalive: bool = False,
+ socket_keepalive_options: Mapping[int, int | bytes] | None = None,
+ socket_timeout: float | None = None,
+ retry: Retry | None = None,
+ retry_on_error: list[type[Exception]] | None = None,
+ # SSL related kwargs
+ ssl: bool = False,
+ ssl_ca_certs: str | None = None,
+ ssl_ca_data: str | None = None,
+ ssl_cert_reqs: str = "required",
+ ssl_certfile: str | None = None,
+ ssl_check_hostname: bool = False,
+ ssl_keyfile: str | None = None,
+ address_remap: Callable[[str, int], tuple[str, int]] | None = None,
+ ) -> None: ... 
+ async def initialize(self) -> Self: ... 
+ async def close(self) -> None: ... 
+ async def __aenter__(self) -> Self: ... 
+ async def __aexit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __await__(self) -> Awaitable[Self]: ... + def __del__(self) -> None: ... + async def on_connect(self, connection: Connection) -> None: ... + def get_nodes(self) -> list[ClusterNode]: ... + def get_primaries(self) -> list[ClusterNode]: ... + def get_replicas(self) -> list[ClusterNode]: ... + def get_random_node(self) -> ClusterNode: ... + def get_default_node(self) -> ClusterNode: ... + def set_default_node(self, node: ClusterNode) -> None: ... + def get_node(self, host: str | None = None, port: int | None = None, node_name: str | None = None) -> ClusterNode | None: ... + def get_node_from_key(self, key: str, replica: bool = False) -> ClusterNode | None: ... + def keyslot(self, key: EncodableT) -> int: ... + def get_encoder(self) -> Encoder: ... + def get_connection_kwargs(self) -> dict[str, Any | None]: ... + def set_response_callback(self, command: str, callback: ResponseCallbackT) -> None: ... + async def execute_command(self, *args: EncodableT, **kwargs: Any) -> Any: ... + def pipeline(self, transaction: Any | None = None, shard_hint: Any | None = None) -> ClusterPipeline[_StrType]: ... + +class ClusterNode: + host: str + port: str | int + name: str + server_type: str | None + max_connections: int + connection_class: type[Connection] + connection_kwargs: dict[str, Any] + response_callbacks: dict[Incomplete, Incomplete] + def __init__( + self, + host: str, + port: str | int, + server_type: str | None = None, + *, + max_connections: int = 2147483648, + connection_class: type[Connection] = ..., + **connection_kwargs: Any, + ) -> None: ... + def __eq__(self, obj: object) -> bool: ... + def __del__(self) -> None: ... + async def disconnect(self) -> None: ... + def acquire_connection(self) -> Connection: ... + async def parse_response(self, connection: Connection, command: str, **kwargs: Any) -> Any: ... 
+ async def execute_command(self, *args: Any, **kwargs: Any) -> Any: ... + async def execute_pipeline(self, commands: list[PipelineCommand]) -> bool: ... + +class NodesManager: + startup_nodes: dict[str, ClusterNode] + require_full_coverage: bool + connection_kwargs: dict[str, Any] + default_node: ClusterNode | None + nodes_cache: dict[str, ClusterNode] + slots_cache: dict[int, list[ClusterNode]] + read_load_balancer: LoadBalancer + address_remap: Callable[[str, int], tuple[str, int]] | None + def __init__( + self, + startup_nodes: list[ClusterNode], + require_full_coverage: bool, + connection_kwargs: dict[str, Any], + address_remap: Callable[[str, int], tuple[str, int]] | None = None, + ) -> None: ... + def get_node(self, host: str | None = None, port: int | None = None, node_name: str | None = None) -> ClusterNode | None: ... + def set_nodes(self, old: dict[str, ClusterNode], new: dict[str, ClusterNode], remove_old: bool = False) -> None: ... + def get_node_from_slot(self, slot: int, read_from_replicas: bool = False) -> ClusterNode: ... + def get_nodes_by_server_type(self, server_type: str) -> list[ClusterNode]: ... + async def initialize(self) -> None: ... + async def close(self, attr: str = "nodes_cache") -> None: ... + def remap_host_port(self, host: str, port: int) -> tuple[str, int]: ... + +class ClusterPipeline(AbstractValkey, AbstractValkeyCluster, Generic[_StrType]): # TODO: AsyncValkeyClusterCommands + def __init__(self, client: ValkeyCluster[_StrType]) -> None: ... + async def initialize(self) -> Self: ... + async def __aenter__(self) -> Self: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __await__(self) -> Awaitable[Self]: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __bool__(self) -> bool: ... 
+ def __len__(self) -> int: ... + def execute_command(self, *args: KeyT | EncodableT, **kwargs: Any) -> Self: ... + async def execute(self, raise_on_error: bool = True, allow_redirections: bool = True) -> list[Any]: ... + def mset_nonatomic(self, mapping: Mapping[AnyKeyT, EncodableT]) -> Self: ... + +class PipelineCommand: + args: Any + kwargs: Any + position: int + result: Exception | None | Any + def __init__(self, position: int, *args: Any, **kwargs: Any) -> None: ... diff --git a/valkey/asyncio/connection.pyi b/valkey/asyncio/connection.pyi new file mode 100644 index 00000000..b0525ffd --- /dev/null +++ b/valkey/asyncio/connection.pyi @@ -0,0 +1,363 @@ +import asyncio +import enum +import ssl +from _typeshed import Unused +from abc import abstractmethod +from collections.abc import Callable, Iterable, Mapping +from types import MappingProxyType +from typing import Any, Final, Generic, Literal, Protocol, TypedDict, TypeVar, overload +from typing_extensions import Self, TypeAlias + +from valkey.asyncio.retry import Retry +from valkey.credentials import CredentialProvider +from valkey.exceptions import AuthenticationError, ValkeyError, ResponseError +from valkey.typing import EncodableT, EncodedT + +_SSLVerifyMode: TypeAlias = Literal["none", "optional", "required"] + +SYM_STAR: Final[bytes] +SYM_DOLLAR: Final[bytes] +SYM_CRLF: Final[bytes] +SYM_LF: Final[bytes] +SYM_EMPTY: Final[bytes] + +SERVER_CLOSED_CONNECTION_ERROR: Final[str] + +class _Sentinel(enum.Enum): + sentinel = object() + +SENTINEL: Final[object] +MODULE_LOAD_ERROR: Final[str] +NO_SUCH_MODULE_ERROR: Final[str] +MODULE_UNLOAD_NOT_POSSIBLE_ERROR: Final[str] +MODULE_EXPORTS_DATA_TYPES_ERROR: Final[str] +NO_AUTH_SET_ERROR: Final[dict[str, type[AuthenticationError]]] + +class Encoder: + encoding: str + encoding_errors: str + decode_responses: bool + def __init__(self, encoding: str, encoding_errors: str, decode_responses: bool) -> None: ... + def encode(self, value: EncodableT) -> EncodedT: ... 
+ def decode(self, value: EncodableT, force: bool = False) -> EncodableT: ... + +ExceptionMappingT: TypeAlias = Mapping[str, type[Exception] | Mapping[str, type[Exception]]] + +class BaseParser: + EXCEPTION_CLASSES: ExceptionMappingT + def __init__(self, socket_read_size: int) -> None: ... + @classmethod + def parse_error(cls, response: str) -> ResponseError: ... + @abstractmethod + def on_disconnect(self) -> None: ... + @abstractmethod + def on_connect(self, connection: AbstractConnection) -> None: ... + @abstractmethod + async def can_read_destructive(self) -> bool: ... + @abstractmethod + async def read_response(self, disable_decoding: bool = False) -> EncodableT | ResponseError | list[EncodableT] | None: ... + +class PythonParser(BaseParser): + encoder: Encoder | None + def __init__(self, socket_read_size: int) -> None: ... + def on_connect(self, connection: AbstractConnection) -> None: ... + def on_disconnect(self) -> None: ... + async def can_read_destructive(self) -> bool: ... + async def read_response(self, disable_decoding: bool = False) -> EncodableT | ResponseError | None: ... + +class LibvalkeyParser(BaseParser): + def __init__(self, socket_read_size: int) -> None: ... + def on_connect(self, connection: AbstractConnection) -> None: ... + def on_disconnect(self) -> None: ... + async def can_read_destructive(self) -> bool: ... + async def read_from_socket(self) -> Literal[True]: ... + async def read_response(self, disable_decoding: bool = False) -> EncodableT | list[EncodableT]: ... + +DefaultParser: type[PythonParser | LibvalkeyParser] + +class ConnectCallbackProtocol(Protocol): + def __call__(self, connection: Connection): ... + +class AsyncConnectCallbackProtocol(Protocol): + async def __call__(self, connection: Connection): ... 
+ +ConnectCallbackT: TypeAlias = ConnectCallbackProtocol | AsyncConnectCallbackProtocol + +class AbstractConnection: + pid: int + db: str | int + client_name: str | None + credential_provider: CredentialProvider | None + password: str | None + username: str | None + socket_timeout: float | None + socket_connect_timeout: float | None + retry_on_timeout: bool + retry_on_error: list[type[Exception]] + retry: Retry + health_check_interval: float + next_health_check: float + encoder: Encoder + valkey_connect_func: ConnectCallbackT | None + + def __init__( + self, + *, + db: str | int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | _Sentinel = ..., + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: type[BaseParser] = ..., + socket_read_size: int = 65536, + health_check_interval: float = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: ConnectCallbackT | None = None, + encoder_class: type[Encoder] = ..., + credential_provider: CredentialProvider | None = None, + ) -> None: ... + @abstractmethod + def repr_pieces(self) -> list[tuple[str, Any]]: ... + @property + def is_connected(self) -> bool: ... + def register_connect_callback(self, callback: ConnectCallbackT) -> None: ... + def clear_connect_callbacks(self) -> None: ... + def set_parser(self, parser_class: type[BaseParser]) -> None: ... + async def connect(self) -> None: ... + async def on_connect(self) -> None: ... + async def disconnect(self, nowait: bool = False) -> None: ... + async def check_health(self) -> None: ... + async def send_packed_command(self, command: bytes | str | Iterable[bytes], check_health: bool = True) -> None: ... + async def send_command(self, *args: Any, **kwargs: Any) -> None: ... 
+ async def can_read_destructive(self) -> bool: ... + async def read_response( + self, disable_decoding: bool = False, timeout: float | None = None, *, disconnect_on_error: bool = True + ) -> EncodableT | list[EncodableT] | None: ... + def pack_command(self, *args: EncodableT) -> list[bytes]: ... + def pack_commands(self, commands: Iterable[Iterable[EncodableT]]) -> list[bytes]: ... + +class Connection(AbstractConnection): + host: str + port: int + socket_keepalive: bool + socket_keepalive_options: Mapping[int, int | bytes] | None + socket_type: int + + def __init__( + self, + *, + host: str = "localhost", + port: str | int = 6379, + socket_keepalive: bool = False, + socket_keepalive_options: Mapping[int, int | bytes] | None = None, + socket_type: int = 0, + # **kwargs forwarded to AbstractConnection. + db: str | int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | _Sentinel = ..., + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: type[BaseParser] = ..., + socket_read_size: int = 65536, + health_check_interval: float = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: ConnectCallbackT | None = None, + encoder_class: type[Encoder] = ..., + credential_provider: CredentialProvider | None = None, + ) -> None: ... + def repr_pieces(self) -> list[tuple[str, Any]]: ... + +class SSLConnection(Connection): + ssl_context: ValkeySSLContext + def __init__( + self, + ssl_keyfile: str | None = None, + ssl_certfile: str | None = None, + ssl_cert_reqs: _SSLVerifyMode = "required", + ssl_ca_certs: str | None = None, + ssl_ca_data: str | None = None, + ssl_check_hostname: bool = False, + *, + # **kwargs forwarded to Connection. 
+ host: str = "localhost", + port: str | int = 6379, + socket_keepalive: bool = False, + socket_keepalive_options: Mapping[int, int | bytes] | None = None, + socket_type: int = 0, + db: str | int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | _Sentinel = ..., + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: type[BaseParser] = ..., + socket_read_size: int = 65536, + health_check_interval: float = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: ConnectCallbackT | None = None, + encoder_class: type[Encoder] = ..., + credential_provider: CredentialProvider | None = None, + ) -> None: ... + @property + def keyfile(self) -> str | None: ... + @property + def certfile(self) -> str | None: ... + @property + def cert_reqs(self) -> ssl.VerifyMode: ... + @property + def ca_certs(self) -> str | None: ... + @property + def ca_data(self) -> str | None: ... + @property + def check_hostname(self) -> bool: ... + +class ValkeySSLContext: + keyfile: str | None + certfile: str | None + cert_reqs: ssl.VerifyMode + ca_certs: str | None + ca_data: str | None + check_hostname: bool + context: ssl.SSLContext | None + def __init__( + self, + keyfile: str | None = None, + certfile: str | None = None, + cert_reqs: _SSLVerifyMode | None = None, + ca_certs: str | None = None, + ca_data: str | None = None, + check_hostname: bool = False, + ) -> None: ... + def get(self) -> ssl.SSLContext: ... + +class UnixDomainSocketConnection(Connection): + path: str + def __init__( + self, + *, + path: str = "", + # **kwargs forwarded to AbstractConnection. 
+ db: str | int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | _Sentinel = ..., + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: type[BaseParser] = ..., + socket_read_size: int = 65536, + health_check_interval: float = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: ConnectCallbackT | None = None, + encoder_class: type[Encoder] = ..., + credential_provider: CredentialProvider | None = None, + ) -> None: ... + def repr_pieces(self) -> list[tuple[str, Any]]: ... + +FALSE_STRINGS: Final[tuple[str, ...]] + +def to_bool(value: object) -> bool | None: ... + +URL_QUERY_ARGUMENT_PARSERS: MappingProxyType[str, Callable[[str], Any]] + +class ConnectKwargs(TypedDict): + username: str + password: str + connection_class: type[AbstractConnection] + host: str + port: int + db: int + path: str + +def parse_url(url: str) -> ConnectKwargs: ... + +_ConnectionT = TypeVar("_ConnectionT", bound=AbstractConnection) + +class ConnectionPool(Generic[_ConnectionT]): + # kwargs accepts all arguments from the connection class chosen for + # the given URL, except those encoded in the URL itself. + @classmethod + def from_url(cls, url: str, **kwargs: Any) -> Self: ... + + connection_class: type[_ConnectionT] + connection_kwargs: Mapping[str, Any] + max_connections: int + encoder_class: type[Encoder] + pid: int + + @overload + def __init__( + self: ConnectionPool[_ConnectionT], # pyright: ignore[reportInvalidTypeVarUse] #11780 + connection_class: type[_ConnectionT], + max_connections: int | None = None, + # **kwargs are passed to the constructed connection instances. + **connection_kwargs: Any, + ) -> None: ... 
+ @overload + def __init__(self: ConnectionPool[Connection], *, max_connections: int | None = None, **connection_kwargs) -> None: ... + def reset(self) -> None: ... + async def get_connection(self, command_name: Unused, *keys: Unused, **options: Unused) -> _ConnectionT: ... + def get_encoder(self) -> Encoder: ... + def make_connection(self) -> _ConnectionT: ... + async def release(self, connection: AbstractConnection) -> None: ... + def owns_connection(self, connection: AbstractConnection) -> bool: ... + async def disconnect(self, inuse_connections: bool = True) -> None: ... + def set_retry(self, retry: Retry) -> None: ... + +class BlockingConnectionPool(ConnectionPool[_ConnectionT]): + queue_class: type[asyncio.Queue[_ConnectionT | None]] + timeout: int | None + pool: asyncio.Queue[_ConnectionT | None] + + @overload + def __init__( + self: BlockingConnectionPool[_ConnectionT], # pyright: ignore[reportInvalidTypeVarUse] #11780 + max_connections: int, + timeout: int | None, + connection_class: type[_ConnectionT], + queue_class: type[asyncio.Queue[_ConnectionT | None]] = ..., + # **kwargs are passed to the constructed connection instances. + **connection_kwargs: Any, + ) -> None: ... + @overload + def __init__( + self: BlockingConnectionPool[_ConnectionT], # pyright: ignore[reportInvalidTypeVarUse] #11780 + max_connections: int = 50, + timeout: int | None = 20, + *, + connection_class: type[_ConnectionT], + queue_class: type[asyncio.Queue[_ConnectionT | None]] = ..., + # **kwargs are passed to the constructed connection instances. + **connection_kwargs: Any, + ) -> None: ... + @overload + def __init__( + self: BlockingConnectionPool[Connection], + max_connections: int = 50, + timeout: int | None = 20, + *, + queue_class: type[asyncio.Queue[Connection | None]] = ..., + # **kwargs are passed to the constructed connection instances. + **connection_kwargs: Any, + ) -> None: ... 
diff --git a/valkey/asyncio/lock.pyi b/valkey/asyncio/lock.pyi new file mode 100644 index 00000000..018591c7 --- /dev/null +++ b/valkey/asyncio/lock.pyi @@ -0,0 +1,51 @@ +import threading +from collections.abc import Awaitable +from types import SimpleNamespace, TracebackType +from typing import Any, ClassVar +from typing_extensions import Self + +from valkey.asyncio import Valkey +from valkey.commands.core import AsyncScript + +class Lock: + lua_release: ClassVar[AsyncScript | None] + lua_extend: ClassVar[AsyncScript | None] + lua_reacquire: ClassVar[AsyncScript | None] + LUA_RELEASE_SCRIPT: ClassVar[str] + LUA_EXTEND_SCRIPT: ClassVar[str] + LUA_REACQUIRE_SCRIPT: ClassVar[str] + valkey: Valkey[Any] + name: str | bytes | memoryview + timeout: float | None + sleep: float + blocking: bool + blocking_timeout: float | None + thread_local: bool + local: threading.local | SimpleNamespace + def __init__( + self, + valkey: Valkey[Any], + name: str | bytes | memoryview, + timeout: float | None = None, + sleep: float = 0.1, + blocking: bool = True, + blocking_timeout: float | None = None, + thread_local: bool = True, + ) -> None: ... + def register_scripts(self) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + async def acquire( + self, blocking: bool | None = None, blocking_timeout: float | None = None, token: str | bytes | None = None + ) -> bool: ... + async def do_acquire(self, token: str | bytes) -> bool: ... + async def locked(self) -> bool: ... + async def owned(self) -> bool: ... + def release(self) -> Awaitable[None]: ... + async def do_release(self, expected_token: bytes) -> None: ... + def extend(self, additional_time: float, replace_ttl: bool = False) -> Awaitable[bool]: ... + async def do_extend(self, additional_time: float, replace_ttl: bool) -> bool: ... + def reacquire(self) -> Awaitable[bool]: ... 
+ async def do_reacquire(self) -> bool: ... diff --git a/valkey/asyncio/parser.pyi b/valkey/asyncio/parser.pyi new file mode 100644 index 00000000..fe5139a8 --- /dev/null +++ b/valkey/asyncio/parser.pyi @@ -0,0 +1,9 @@ +from _typeshed import Incomplete +from typing import Any + +# TODO: define and use: +# from valkey.asyncio.cluster import ClusterNode + +class CommandsParser: + async def initialize(self, node: Incomplete | None = None) -> None: ... # TODO: ClusterNode + async def get_keys(self, *args: Any) -> tuple[str, ...] | None: ... diff --git a/valkey/asyncio/retry.pyi b/valkey/asyncio/retry.pyi new file mode 100644 index 00000000..0970df7b --- /dev/null +++ b/valkey/asyncio/retry.pyi @@ -0,0 +1,12 @@ +from collections.abc import Awaitable, Callable, Iterable +from typing import TypeVar + +from valkey.backoff import AbstractBackoff +from valkey.exceptions import ValkeyError + +_T = TypeVar("_T") + +class Retry: + def __init__(self, backoff: AbstractBackoff, retries: int, supported_errors: tuple[type[ValkeyError], ...] = ...) -> None: ... + def update_supported_errors(self, specified_errors: Iterable[type[ValkeyError]]) -> None: ... + async def call_with_retry(self, do: Callable[[], Awaitable[_T]], fail: Callable[[ValkeyError], Awaitable[object]]) -> _T: ... 
diff --git a/valkey/asyncio/sentinel.pyi b/valkey/asyncio/sentinel.pyi new file mode 100644 index 00000000..1fa9e5fa --- /dev/null +++ b/valkey/asyncio/sentinel.pyi @@ -0,0 +1,162 @@ +from collections.abc import AsyncIterator, Iterable, Mapping +from typing import Any, Literal, TypedDict, TypeVar, overload + +from valkey.asyncio.client import Valkey +from valkey.asyncio.connection import ( + BaseParser, + ConnectCallbackT, + Connection, + ConnectionPool, + Encoder, + SSLConnection, + _ConnectionT, + _Sentinel, +) +from valkey.asyncio.retry import Retry +from valkey.commands import AsyncSentinelCommands +from valkey.credentials import CredentialProvider +from valkey.exceptions import ConnectionError, ValkeyError + +_ValkeyT = TypeVar("_ValkeyT", bound=Valkey[Any]) + +class MasterNotFoundError(ConnectionError): ... +class SlaveNotFoundError(ConnectionError): ... + +class SentinelManagedConnection(Connection): + connection_pool: ConnectionPool[Any] | None + def __init__( + self, + *, + connection_pool: ConnectionPool[Any] | None, + # **kwargs forwarded to Connection. + host: str = "localhost", + port: str | int = 6379, + socket_keepalive: bool = False, + socket_keepalive_options: Mapping[int, int | bytes] | None = None, + socket_type: int = 0, + db: str | int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | _Sentinel = ..., + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: type[BaseParser] = ..., + socket_read_size: int = 65536, + health_check_interval: float = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: ConnectCallbackT | None = None, + encoder_class: type[Encoder] = ..., + credential_provider: CredentialProvider | None = None, + ) -> None: ... 
+    async def connect_to(self, address: tuple[str, int]) -> None: ...
+    async def connect(self) -> None: ...
+
+class SentinelManagedSSLConnection(SentinelManagedConnection, SSLConnection): ...
+
+class SentinelConnectionPool(ConnectionPool[_ConnectionT]):
+    is_master: bool
+    check_connection: bool
+    service_name: str
+    sentinel_manager: Sentinel
+    master_address: tuple[str, int] | None
+    slave_rr_counter: int | None
+
+    def __init__(
+        self,
+        service_name: str,
+        sentinel_manager: Sentinel,
+        *,
+        ssl: bool = False,
+        connection_class: type[SentinelManagedConnection] = ...,
+        is_master: bool = True,
+        check_connection: bool = False,
+        # **kwargs ultimately forwarded to constructed Connection instances.
+        host: str = "localhost",
+        port: str | int = 6379,
+        socket_keepalive: bool = False,
+        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
+        socket_type: int = 0,
+        db: str | int = 0,
+        password: str | None = None,
+        socket_timeout: float | None = None,
+        socket_connect_timeout: float | None = None,
+        retry_on_timeout: bool = False,
+        retry_on_error: list[type[ValkeyError]] | _Sentinel = ...,
+        encoding: str = "utf-8",
+        encoding_errors: str = "strict",
+        decode_responses: bool = False,
+        parser_class: type[BaseParser] = ...,
+        socket_read_size: int = 65536,
+        health_check_interval: float = 0,
+        client_name: str | None = None,
+        username: str | None = None,
+        retry: Retry | None = None,
+        valkey_connect_func: ConnectCallbackT | None = None,
+        encoder_class: type[Encoder] = ...,
+        credential_provider: CredentialProvider | None = None,
+    ) -> None: ...
+    async def get_master_address(self) -> tuple[str, int]: ...
+    async def rotate_slaves(self) -> AsyncIterator[tuple[str, int]]: ...
+ +_State = TypedDict( + "_State", {"ip": str, "port": int, "is_master": bool, "is_sdown": bool, "is_odown": bool, "num-other-sentinels": int} +) + +class Sentinel(AsyncSentinelCommands): + sentinel_kwargs: Mapping[str, Any] + sentinels: list[Valkey[Any]] + min_other_sentinels: int + connection_kwargs: Mapping[str, Any] + def __init__( + self, + sentinels: Iterable[tuple[str, int]], + min_other_sentinels: int = 0, + sentinel_kwargs: Mapping[str, Any] | None = None, + **connection_kwargs: Any, + ) -> None: ... + async def execute_command(self, *args: Any, once: bool = False, **kwargs: Any) -> Literal[True]: ... + def check_master_state(self, state: _State, service_name: str) -> bool: ... + async def discover_master(self, service_name: str) -> tuple[str, int]: ... + def filter_slaves(self, slaves: Iterable[_State]) -> list[tuple[str, int]]: ... + async def discover_slaves(self, service_name: str) -> list[tuple[str, int]]: ... + @overload + def master_for( + self, + service_name: str, + valkey_class: type[_ValkeyT], + connection_pool_class: type[SentinelConnectionPool[Any]] = ..., + # Forwarded to the connection pool constructor. + **kwargs: Any, + ) -> _ValkeyT: ... + @overload + def master_for( + self, + service_name: str, + *, + connection_pool_class: type[SentinelConnectionPool[Any]] = ..., + # Forwarded to the connection pool constructor. + **kwargs: Any, + ) -> Valkey[Any]: ... + @overload + def slave_for( + self, + service_name: str, + valkey_class: type[_ValkeyT], + connection_pool_class: type[SentinelConnectionPool[Any]] = ..., + # Forwarded to the connection pool constructor. + **kwargs: Any, + ) -> _ValkeyT: ... + @overload + def slave_for( + self, + service_name: str, + *, + connection_pool_class: type[SentinelConnectionPool[Any]] = ..., + # Forwarded to the connection pool constructor. + **kwargs: Any, + ) -> Valkey[Any]: ... 
diff --git a/valkey/asyncio/utils.pyi b/valkey/asyncio/utils.pyi
new file mode 100644
index 00000000..cd3b14df
--- /dev/null
+++ b/valkey/asyncio/utils.pyi
@@ -0,0 +1,15 @@
+from types import TracebackType
+from typing import Any, Generic
+
+from valkey.asyncio.client import Pipeline, Valkey
+from valkey.client import _StrType
+
+def from_url(url: str, **kwargs: Any) -> Valkey[Any]: ...
+
+class pipeline(Generic[_StrType]):
+    p: Pipeline[_StrType]
+    def __init__(self, valkey_obj: Valkey[_StrType]) -> None: ...
+    async def __aenter__(self) -> Pipeline[_StrType]: ...
+    async def __aexit__(
+        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
+    ) -> None: ...
diff --git a/valkey/backoff.pyi b/valkey/backoff.pyi
new file mode 100644
index 00000000..40230a13
--- /dev/null
+++ b/valkey/backoff.pyi
@@ -0,0 +1,31 @@
+from abc import ABC, abstractmethod
+
+class AbstractBackoff(ABC):
+    def reset(self) -> None: ...
+    @abstractmethod
+    def compute(self, failures: int) -> float: ...
+
+class ConstantBackoff(AbstractBackoff):
+    def __init__(self, backoff: int) -> None: ...
+    def compute(self, failures: int) -> float: ...
+
+class NoBackoff(ConstantBackoff):
+    def __init__(self) -> None: ...
+
+class ExponentialBackoff(AbstractBackoff):
+    def __init__(self, cap: float = 0.512, base: float = 0.008) -> None: ...
+    def compute(self, failures: int) -> float: ...
+
+class FullJitterBackoff(AbstractBackoff):
+    def __init__(self, cap: float = 0.512, base: float = 0.008) -> None: ...
+    def compute(self, failures: int) -> float: ...
+
+class EqualJitterBackoff(AbstractBackoff):
+    def __init__(self, cap: float = 0.512, base: float = 0.008) -> None: ...
+    def compute(self, failures: int) -> float: ...
+
+class DecorrelatedJitterBackoff(AbstractBackoff):
+    def __init__(self, cap: float = 0.512, base: float = 0.008) -> None: ...
+    def compute(self, failures: int) -> float: ...
+
+def default_backoff() -> EqualJitterBackoff: ...
diff --git a/valkey/client.pyi b/valkey/client.pyi new file mode 100644 index 00000000..b9ad6a83 --- /dev/null +++ b/valkey/client.pyi @@ -0,0 +1,799 @@ +import threading +from _typeshed import Incomplete, SupportsItems, Unused +from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence +from datetime import datetime, timedelta +from re import Pattern +from types import TracebackType +from typing import Any, ClassVar, Literal, TypeVar, overload +from typing_extensions import Self, TypeAlias + +from valkey import ValkeyError + +from .commands import CoreCommands, ValkeyModuleCommands, SentinelCommands +from .connection import ConnectionPool, _ConnectFunc, _ConnectionPoolOptions +from .credentials import CredentialProvider +from .lock import Lock +from .retry import Retry +from .typing import ChannelT, EncodableT, KeyT, PatternT + +_Value: TypeAlias = bytes | float | int | str +_Key: TypeAlias = str | bytes + +# Lib returns str or bytes depending on value of decode_responses +_StrType = TypeVar("_StrType", bound=str | bytes) + +_VT = TypeVar("_VT") +_T = TypeVar("_T") + +# Keyword arguments that are passed to Valkey.parse_response(). +_ParseResponseOptions: TypeAlias = Any +# Keyword arguments that are passed to Valkey.execute_command(). +_CommandOptions: TypeAlias = _ConnectionPoolOptions | _ParseResponseOptions + +SYM_EMPTY: bytes +EMPTY_RESPONSE: str +NEVER_DECODE: str + +class CaseInsensitiveDict(dict[_StrType, _VT]): + def __init__(self, data: SupportsItems[_StrType, _VT]) -> None: ... + def update(self, data: SupportsItems[_StrType, _VT]) -> None: ... # type: ignore[override] + @overload + def get(self, k: _StrType, default: None = None) -> _VT | None: ... + @overload + def get(self, k: _StrType, default: _VT | _T) -> _VT | _T: ... + # Overrides many other methods too, but without changing signature + +def list_or_args(keys, args): ... +def timestamp_to_datetime(response): ... +def string_keys_to_dict(key_string, callback): ... 
+def parse_debug_object(response): ... +def parse_object(response, infotype): ... +def parse_info(response): ... + +SENTINEL_STATE_TYPES: dict[str, type[int]] + +def parse_sentinel_state(item): ... +def parse_sentinel_master(response): ... +def parse_sentinel_masters(response): ... +def parse_sentinel_slaves_and_sentinels(response): ... +def parse_sentinel_get_master(response): ... +def pairs_to_dict(response, decode_keys: bool = False, decode_string_values: bool = False): ... +def pairs_to_dict_typed(response, type_info): ... +def zset_score_pairs(response, **options): ... +def sort_return_tuples(response, **options): ... +def int_or_none(response): ... +def float_or_none(response): ... +def bool_ok(response): ... +def parse_client_list(response, **options): ... +def parse_config_get(response, **options): ... +def parse_scan(response, **options): ... +def parse_hscan(response, **options): ... +def parse_zscan(response, **options): ... +def parse_slowlog_get(response, **options): ... + +_LockType = TypeVar("_LockType") + +class AbstractValkey: + RESPONSE_CALLBACKS: dict[str, Any] + +class Valkey(AbstractValkey, ValkeyModuleCommands, CoreCommands[_StrType], SentinelCommands): + @overload + @classmethod + def from_url( + cls, + url: str, + *, + host: str | None = ..., + port: int | None = ..., + db: int | None = ..., + password: str | None = ..., + socket_timeout: float | None = ..., + socket_connect_timeout: float | None = ..., + socket_keepalive: bool | None = ..., + socket_keepalive_options: Mapping[str, int | str] | None = ..., + connection_pool: ConnectionPool | None = ..., + unix_socket_path: str | None = ..., + encoding: str = ..., + encoding_errors: str = ..., + charset: str | None = ..., + errors: str | None = ..., + decode_responses: Literal[True], + retry_on_timeout: bool = ..., + retry_on_error: list[type[ValkeyError]] | None = ..., + ssl: bool = ..., + ssl_keyfile: str | None = ..., + ssl_certfile: str | None = ..., + ssl_cert_reqs: str | int | None = 
..., + ssl_ca_certs: str | None = ..., + ssl_check_hostname: bool = ..., + max_connections: int | None = ..., + single_connection_client: bool = ..., + health_check_interval: float = ..., + client_name: str | None = ..., + username: str | None = ..., + retry: Retry | None = ..., + ) -> Valkey[str]: ... + @overload + @classmethod + def from_url( + cls, + url: str, + *, + host: str | None = ..., + port: int | None = ..., + db: int | None = ..., + password: str | None = ..., + socket_timeout: float | None = ..., + socket_connect_timeout: float | None = ..., + socket_keepalive: bool | None = ..., + socket_keepalive_options: Mapping[str, int | str] | None = ..., + connection_pool: ConnectionPool | None = ..., + unix_socket_path: str | None = ..., + encoding: str = ..., + encoding_errors: str = ..., + charset: str | None = ..., + errors: str | None = ..., + decode_responses: Literal[False] = False, + retry_on_timeout: bool = ..., + retry_on_error: list[type[ValkeyError]] | None = ..., + ssl: bool = ..., + ssl_keyfile: str | None = ..., + ssl_certfile: str | None = ..., + ssl_cert_reqs: str | int | None = ..., + ssl_ca_certs: str | None = ..., + ssl_check_hostname: bool = ..., + max_connections: int | None = ..., + single_connection_client: bool = ..., + health_check_interval: float = ..., + client_name: str | None = ..., + username: str | None = ..., + retry: Retry | None = ..., + ) -> Valkey[bytes]: ... 
+ connection_pool: Any + response_callbacks: Any + @overload + def __init__( + self: Valkey[str], + host: str, + port: int, + db: int, + password: str | None, + socket_timeout: float | None, + socket_connect_timeout: float | None, + socket_keepalive: bool | None, + socket_keepalive_options: Mapping[str, int | str] | None, + connection_pool: ConnectionPool | None, + unix_socket_path: str | None, + encoding: str, + encoding_errors: str, + charset: str | None, + errors: str | None, + decode_responses: Literal[True], + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | None = None, + ssl: bool = False, + ssl_keyfile: str | None = None, + ssl_certfile: str | None = None, + ssl_cert_reqs: str | int | None = "required", + ssl_ca_certs: str | None = None, + ssl_ca_path: Incomplete | None = None, + ssl_ca_data: Incomplete | None = None, + ssl_check_hostname: bool = False, + ssl_password: Incomplete | None = None, + ssl_validate_ocsp: bool = False, + ssl_validate_ocsp_stapled: bool = False, # added in 4.1.1 + ssl_ocsp_context: Incomplete | None = None, # added in 4.1.1 + ssl_ocsp_expected_cert: Incomplete | None = None, # added in 4.1.1 + max_connections: int | None = None, + single_connection_client: bool = False, + health_check_interval: float = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: _ConnectFunc | None = None, + credential_provider: CredentialProvider | None = None, + ) -> None: ... 
+ @overload + def __init__( + self: Valkey[str], + host: str = "localhost", + port: int = 6379, + db: int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + socket_keepalive: bool | None = None, + socket_keepalive_options: Mapping[str, int | str] | None = None, + connection_pool: ConnectionPool | None = None, + unix_socket_path: str | None = None, + encoding: str = "utf-8", + encoding_errors: str = "strict", + charset: str | None = None, + errors: str | None = None, + *, + decode_responses: Literal[True], + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | None = None, + ssl: bool = False, + ssl_keyfile: str | None = None, + ssl_certfile: str | None = None, + ssl_cert_reqs: str | int | None = "required", + ssl_ca_certs: str | None = None, + ssl_ca_data: Incomplete | None = None, + ssl_check_hostname: bool = False, + ssl_password: Incomplete | None = None, + ssl_validate_ocsp: bool = False, + ssl_validate_ocsp_stapled: bool = False, # added in 4.1.1 + ssl_ocsp_context: Incomplete | None = None, # added in 4.1.1 + ssl_ocsp_expected_cert: Incomplete | None = None, # added in 4.1.1 + max_connections: int | None = None, + single_connection_client: bool = False, + health_check_interval: float = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: _ConnectFunc | None = None, + credential_provider: CredentialProvider | None = None, + ) -> None: ... 
+ @overload + def __init__( + self: Valkey[bytes], + host: str = "localhost", + port: int = 6379, + db: int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + socket_keepalive: bool | None = None, + socket_keepalive_options: Mapping[str, int | str] | None = None, + connection_pool: ConnectionPool | None = None, + unix_socket_path: str | None = None, + encoding: str = "utf-8", + encoding_errors: str = "strict", + charset: str | None = None, + errors: str | None = None, + decode_responses: Literal[False] = False, + retry_on_timeout: bool = False, + retry_on_error: list[type[ValkeyError]] | None = None, + ssl: bool = False, + ssl_keyfile: str | None = None, + ssl_certfile: str | None = None, + ssl_cert_reqs: str | int | None = "required", + ssl_ca_certs: str | None = None, + ssl_ca_data: Incomplete | None = None, + ssl_check_hostname: bool = False, + ssl_password: Incomplete | None = None, + ssl_validate_ocsp: bool = False, + ssl_validate_ocsp_stapled: bool = False, # added in 4.1.1 + ssl_ocsp_context: Incomplete | None = None, # added in 4.1.1 + ssl_ocsp_expected_cert: Incomplete | None = None, # added in 4.1.1 + max_connections: int | None = None, + single_connection_client: bool = False, + health_check_interval: float = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: _ConnectFunc | None = None, + credential_provider: CredentialProvider | None = None, + ) -> None: ... + def get_encoder(self): ... + def get_connection_kwargs(self): ... + def set_response_callback(self, command, callback): ... + def pipeline(self, transaction: bool = True, shard_hint: Any = None) -> Pipeline[_StrType]: ... + def transaction(self, func, *watches, **kwargs): ... 
+ @overload + def lock( + self, + name: _Key, + timeout: float | None = None, + sleep: float = 0.1, + blocking: bool = True, + blocking_timeout: float | None = None, + lock_class: None = None, + thread_local: bool = True, + ) -> Lock: ... + @overload + def lock( + self, + name: _Key, + timeout: float | None, + sleep: float, + blocking: bool, + blocking_timeout: float | None, + lock_class: type[_LockType], + thread_local: bool = True, + ) -> _LockType: ... + @overload + def lock( + self, + name: _Key, + timeout: float | None = None, + sleep: float = 0.1, + blocking: bool = True, + blocking_timeout: float | None = None, + *, + lock_class: type[_LockType], + thread_local: bool = True, + ) -> _LockType: ... + def pubsub(self, *, shard_hint: Any = ..., ignore_subscribe_messages: bool = ...) -> PubSub: ... + def execute_command(self, *args, **options: _CommandOptions): ... + def parse_response(self, connection, command_name, **options: _ParseResponseOptions): ... + def monitor(self) -> Monitor: ... + def __enter__(self) -> Valkey[_StrType]: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __del__(self) -> None: ... + def close(self) -> None: ... + def client(self) -> Valkey[_StrType]: ... + +StrictValkey = Valkey + +class PubSub: + PUBLISH_MESSAGE_TYPES: ClassVar[tuple[str, str]] + UNSUBSCRIBE_MESSAGE_TYPES: ClassVar[tuple[str, str]] + HEALTH_CHECK_MESSAGE: ClassVar[str] + connection_pool: Any + shard_hint: Any + ignore_subscribe_messages: Any + connection: Any + subscribed_event: threading.Event + encoder: Any + health_check_response_b: bytes + health_check_response: list[str] | list[bytes] + def __init__( + self, + connection_pool, + shard_hint: Incomplete | None = None, + ignore_subscribe_messages: bool = False, + encoder: Incomplete | None = None, + ) -> None: ... + def __enter__(self) -> Self: ... 
+ def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __del__(self): ... + channels: Any + patterns: Any + def reset(self): ... + def close(self) -> None: ... + def on_connect(self, connection): ... + @property + def subscribed(self): ... + def execute_command(self, *args): ... + def clean_health_check_responses(self) -> None: ... + def parse_response(self, block: bool = True, timeout: float = 0): ... + def is_health_check_response(self, response) -> bool: ... + def check_health(self) -> None: ... + def psubscribe(self, *args: _Key, **kwargs: Callable[[Any], None]): ... + def punsubscribe(self, *args: _Key) -> None: ... + def subscribe(self, *args: _Key, **kwargs: Callable[[Any], None]) -> None: ... + def unsubscribe(self, *args: _Key) -> None: ... + def listen(self): ... + def get_message(self, ignore_subscribe_messages: bool = False, timeout: float = 0.0) -> dict[str, Any] | None: ... + def handle_message(self, response, ignore_subscribe_messages: bool = False) -> dict[str, Any] | None: ... + def run_in_thread(self, sleep_time: float = 0, daemon: bool = False, exception_handler: Incomplete | None = None): ... + def ping(self, message: _Value | None = None) -> None: ... + +class PubSubWorkerThread(threading.Thread): + daemon: Any + pubsub: Any + sleep_time: Any + exception_handler: Any + def __init__(self, pubsub, sleep_time, daemon: bool = False, exception_handler: Incomplete | None = None) -> None: ... + def run(self) -> None: ... + def stop(self) -> None: ... + +class Pipeline(Valkey[_StrType]): + UNWATCH_COMMANDS: Any + connection_pool: Any + connection: Any + response_callbacks: Any + transaction: bool + shard_hint: Any + watching: bool + + command_stack: Any + scripts: Any + explicit_transaction: Any + def __init__(self, connection_pool, response_callbacks, transaction, shard_hint) -> None: ... + def __enter__(self) -> Pipeline[_StrType]: ... 
+ def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def __del__(self) -> None: ... + def __len__(self) -> int: ... + def __bool__(self) -> bool: ... + def discard(self) -> None: ... + def reset(self) -> None: ... + def multi(self) -> None: ... + def execute_command(self, *args, **options): ... + def immediate_execute_command(self, *args, **options): ... + def pipeline_execute_command(self, *args, **options): ... + def raise_first_error(self, commands, response): ... + def annotate_exception(self, exception, number, command): ... + def parse_response(self, connection, command_name, **options): ... + def load_scripts(self): ... + def execute(self, raise_on_error: bool = True) -> list[Any]: ... + def watch(self, *names: _Key) -> bool: ... + def unwatch(self) -> bool: ... + # in the Valkey implementation, the following methods are inherited from client. + def set_response_callback(self, command, callback): ... + def pipeline(self, transaction: bool = True, shard_hint: Any = None) -> Pipeline[_StrType]: ... + def acl_cat(self, category: str | None = None) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_deluser(self, username: str) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_genpass(self, bits: int | None = None) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_getuser(self, username: str) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_list(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_load(self) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def acl_setuser( # type: ignore[override] + self, + username: str, + enabled: bool = False, + nopass: bool = False, + passwords: Sequence[str] | None = None, + hashed_passwords: Sequence[str] | None = None, + categories: Sequence[str] | None = None, + commands: Sequence[str] | None = None, + keys: Sequence[str] | None = None, + channels: Iterable[ChannelT] | None = None, + selectors: Iterable[tuple[str, KeyT]] | None = None, + reset: bool = False, + reset_keys: bool = False, + reset_channels: bool = False, + reset_passwords: bool = False, + **kwargs: _CommandOptions, + ) -> Pipeline[_StrType]: ... + def acl_users(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_whoami(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def bgrewriteaof(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def bgsave(self, schedule: bool = True) -> Pipeline[_StrType]: ... # type: ignore[override] + def client_id(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def client_kill(self, address: str) -> Pipeline[_StrType]: ... # type: ignore[override] + def client_list(self, _type: str | None = None, client_id: list[str] = []) -> Pipeline[_StrType]: ... # type: ignore[override] + def client_getname(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def client_setname(self, name: str) -> Pipeline[_StrType]: ... # type: ignore[override] + def readwrite(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def readonly(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def config_get(self, pattern: PatternT = "*", *args: PatternT, **kwargs: _CommandOptions) -> Pipeline[_StrType]: ... + def config_set( + self, name: KeyT, value: EncodableT, *args: KeyT | EncodableT, **kwargs: _CommandOptions + ) -> Pipeline[_StrType]: ... + def config_resetstat(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def config_rewrite(self) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def dbsize(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def debug_object(self, key) -> Pipeline[_StrType]: ... # type: ignore[override] + def echo(self, value) -> Pipeline[_StrType]: ... # type: ignore[override] + def flushall(self, asynchronous: bool = False) -> Pipeline[_StrType]: ... # type: ignore[override] + def flushdb(self, asynchronous: bool = False) -> Pipeline[_StrType]: ... # type: ignore[override] + def info(self, section: _Key | None = None, *args: _Key, **kwargs: _CommandOptions) -> Pipeline[_StrType]: ... # type: ignore[override] + def lastsave(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def object(self, infotype, key) -> Pipeline[_StrType]: ... # type: ignore[override] + def ping(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def save(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def sentinel_get_master_addr_by_name(self, service_name) -> Pipeline[_StrType]: ... + def sentinel_master(self, service_name) -> Pipeline[_StrType]: ... + def sentinel_masters(self) -> Pipeline[_StrType]: ... + def sentinel_monitor(self, name, ip, port, quorum) -> Pipeline[_StrType]: ... + def sentinel_remove(self, name) -> Pipeline[_StrType]: ... + def sentinel_sentinels(self, service_name) -> Pipeline[_StrType]: ... + def sentinel_set(self, name, option, value) -> Pipeline[_StrType]: ... + def sentinel_slaves(self, service_name) -> Pipeline[_StrType]: ... + def slaveof(self, host=None, port=None) -> Pipeline[_StrType]: ... # type: ignore[override] + def slowlog_get(self, num=None) -> Pipeline[_StrType]: ... # type: ignore[override] + def slowlog_len(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def slowlog_reset(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def time(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def append(self, key, value) -> Pipeline[_StrType]: ... 
    # --- string / key / list / set / stream commands --------------------------
    # Same buffering contract as above: each stub returns the Pipeline itself.
    def bitcount(  # type: ignore[override]
        self, key: _Key, start: int | None = None, end: int | None = None, mode: str | None = None
    ) -> Pipeline[_StrType]: ...
    def bitop(self, operation, dest, *keys) -> Pipeline[_StrType]: ...
    def bitpos(self, key, bit, start=None, end=None, mode: str | None = None) -> Pipeline[_StrType]: ...
    def decr(self, name, amount=1) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def delete(self, *names: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    # NOTE(review): the positional parameter below is literally named `_Key`,
    # shadowing the type alias of the same name — confirm against upstream
    # typeshed before renaming.
    def __delitem__(self, _Key) -> None: ...
    def dump(self, name) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def exists(self, *names: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def __contains__(self, *names: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def expire(  # type: ignore[override]
        self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
    ) -> Pipeline[_StrType]: ...
    def expireat(
        self, name, when, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
    ) -> Pipeline[_StrType]: ...
    def get(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def __getitem__(self, name) -> Pipeline[_StrType]: ...
    def getbit(self, name: _Key, offset: int) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def getrange(self, key, start, end) -> Pipeline[_StrType]: ...
    def getset(self, name, value) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def incr(self, name, amount=1) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def incrby(self, name, amount=1) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def incrbyfloat(self, name, amount=1.0) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def keys(self, pattern: _Key = "*") -> Pipeline[_StrType]: ...  # type: ignore[override]
    def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def mset(self, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def msetnx(self, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def move(self, name: _Key, db: int) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def persist(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def pexpire(  # type: ignore[override]
        self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
    ) -> Pipeline[_StrType]: ...
    def pexpireat(  # type: ignore[override]
        self, name: _Key, when: int | datetime, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
    ) -> Pipeline[_StrType]: ...
    def psetex(self, name, time_ms, value) -> Pipeline[_StrType]: ...
    def pttl(self, name) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def randomkey(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def rename(self, src, dst) -> Pipeline[_StrType]: ...
    def renamenx(self, src, dst) -> Pipeline[_StrType]: ...
    def restore(
        self,
        name,
        ttl,
        value,
        replace: bool = False,
        absttl: bool = False,
        idletime: Incomplete | None = None,
        frequency: Incomplete | None = None,
    ) -> Pipeline[_StrType]: ...
    def set(  # type: ignore[override]
        self,
        name: _Key,
        value: _Value,
        ex: None | int | timedelta = None,
        px: None | int | timedelta = None,
        nx: bool = False,
        xx: bool = False,
        keepttl: bool = False,
        get: bool = False,
        exat: Incomplete | None = None,
        pxat: Incomplete | None = None,
    ) -> Pipeline[_StrType]: ...
    def __setitem__(self, name, value) -> None: ...
    def setbit(self, name: _Key, offset: int, value: int) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def setex(self, name: _Key, time: int | timedelta, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def setnx(self, name, value) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def setrange(self, name, offset, value) -> Pipeline[_StrType]: ...
    def strlen(self, name) -> Pipeline[_StrType]: ...
    def substr(self, name, start, end=-1) -> Pipeline[_StrType]: ...
    def ttl(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def type(self, name) -> Pipeline[_StrType]: ...
    def unlink(self, *names: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def blmove(  # type: ignore[override]
        self,
        first_list: _Key,
        second_list: _Key,
        timeout: float,
        src: Literal["LEFT", "RIGHT"] = "LEFT",
        dest: Literal["LEFT", "RIGHT"] = "RIGHT",
    ) -> Pipeline[_StrType]: ...
    def blpop(self, keys: _Value | Iterable[_Value], timeout: float = 0) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def brpop(self, keys: _Value | Iterable[_Value], timeout: float = 0) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def brpoplpush(self, src, dst, timeout=0) -> Pipeline[_StrType]: ...
    def lindex(self, name: _Key, index: int) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def linsert(  # type: ignore[override]
        self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value
    ) -> Pipeline[_StrType]: ...
    def llen(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def lmove(  # type: ignore[override]
        self,
        first_list: _Key,
        second_list: _Key,
        src: Literal["LEFT", "RIGHT"] = "LEFT",
        dest: Literal["LEFT", "RIGHT"] = "RIGHT",
    ) -> Pipeline[_StrType]: ...
    def lpop(self, name, count: int | None = None) -> Pipeline[_StrType]: ...
    def lpush(self, name: _Value, *values: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def lpushx(self, name, value) -> Pipeline[_StrType]: ...
    def lrange(self, name: _Key, start: int, end: int) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def lrem(self, name: _Key, count: int, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def lset(self, name: _Key, index: int, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def ltrim(self, name: _Key, start: int, end: int) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def rpop(self, name, count: int | None = None) -> Pipeline[_StrType]: ...
    def rpoplpush(self, src, dst) -> Pipeline[_StrType]: ...
    def rpush(self, name: _Value, *values: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def rpushx(self, name, value) -> Pipeline[_StrType]: ...
    def sort(  # type: ignore[override]
        self,
        name: _Key,
        start: int | None = None,
        num: int | None = None,
        by: _Key | None = None,
        get: _Key | Sequence[_Key] | None = None,
        desc: bool = False,
        alpha: bool = False,
        store: _Key | None = None,
        groups: bool = False,
    ) -> Pipeline[_StrType]: ...
    def scan(  # type: ignore[override]
        self, cursor: int = 0, match: _Key | None = None, count: int | None = None, _type: str | None = None
    ) -> Pipeline[_StrType]: ...
    # The *_iter helpers iterate eagerly client-side, so they return a plain
    # iterator rather than buffering into the pipeline.
    def scan_iter(self, match: _Key | None = None, count: int | None = None, _type: str | None = None) -> Iterator[Any]: ...  # type: ignore[override]
    def sscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def sscan_iter(self, name: _Key, match: _Key | None = None, count: int | None = None) -> Iterator[Any]: ...
    def hscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def hscan_iter(self, name, match: _Key | None = None, count: int | None = None) -> Iterator[Any]: ...
    def zscan_iter(
        self, name: _Key, match: _Key | None = None, count: int | None = None, score_cast_func: Callable[[_StrType], Any] = ...
    ) -> Iterator[Any]: ...
    def sadd(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def scard(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def sismember(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def smembers(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def smove(self, src: _Key, dst: _Key, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def spop(self, name: _Key, count: int | None = None) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def srandmember(self, name: _Key, number: int | None = None) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def srem(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def xack(self, name, groupname, *ids) -> Pipeline[_StrType]: ...
    def xadd(
        self,
        name,
        fields,
        id="*",
        maxlen=None,
        approximate: bool = True,
        nomkstream: bool = False,
        minid: Incomplete | None = None,
        limit: int | None = None,
    ) -> Pipeline[_StrType]: ...
    def xclaim(
        self,
        name,
        groupname,
        consumername,
        min_idle_time,
        message_ids,
        idle=None,
        time=None,
        retrycount=None,
        force=False,
        justid=False,
    ) -> Pipeline[_StrType]: ...
    def xdel(self, name, *ids) -> Pipeline[_StrType]: ...
    # --- stream-group / sorted-set / hash / scripting commands ----------------
    # Same buffering contract: each stub returns the Pipeline itself.
    def xgroup_create(self, name, groupname, id="$", mkstream=False, entries_read: int | None = None) -> Pipeline[_StrType]: ...
    def xgroup_delconsumer(self, name, groupname, consumername) -> Pipeline[_StrType]: ...
    def xgroup_destroy(self, name, groupname) -> Pipeline[_StrType]: ...
    def xgroup_setid(self, name, groupname, id, entries_read: int | None = None) -> Pipeline[_StrType]: ...
    def xinfo_consumers(self, name, groupname) -> Pipeline[_StrType]: ...
    def xinfo_groups(self, name) -> Pipeline[_StrType]: ...
    def xinfo_stream(self, name, full: bool = False) -> Pipeline[_StrType]: ...
    def xlen(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def xpending(self, name, groupname) -> Pipeline[_StrType]: ...
    def xpending_range(
        self, name: _Key, groupname, min, max, count: int, consumername: Incomplete | None = None, idle: int | None = None
    ) -> Pipeline[_StrType]: ...
    def xrange(self, name, min="-", max="+", count=None) -> Pipeline[_StrType]: ...
    def xread(self, streams, count=None, block=None) -> Pipeline[_StrType]: ...
    def xreadgroup(self, groupname, consumername, streams, count=None, block=None, noack=False) -> Pipeline[_StrType]: ...
    def xrevrange(self, name, max="+", min="-", count=None) -> Pipeline[_StrType]: ...
    def xtrim(
        self, name, maxlen: int | None = None, approximate: bool = True, minid: Incomplete | None = None, limit: int | None = None
    ) -> Pipeline[_StrType]: ...
    # NOTE(review): `gt`/`lt` below are typed `Incomplete | None` yet default to
    # False — presumably they accept a bool flag; confirm against the runtime
    # implementation.
    def zadd(  # type: ignore[override]
        self,
        name: _Key,
        mapping: Mapping[_Key, _Value],
        nx: bool = False,
        xx: bool = False,
        ch: bool = False,
        incr: bool = False,
        gt: Incomplete | None = False,
        lt: Incomplete | None = False,
    ) -> Pipeline[_StrType]: ...
    def zcard(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def zcount(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def zincrby(self, name: _Key, amount: float, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def zinterstore(  # type: ignore[override]
        self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None
    ) -> Pipeline[_StrType]: ...
    def zlexcount(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def zpopmax(self, name: _Key, count: int | None = None) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def zpopmin(self, name: _Key, count: int | None = None) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float = 0) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float = 0) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def zrange(  # type: ignore[override]
        self,
        name: _Key,
        start: int,
        end: int,
        desc: bool = False,
        withscores: bool = False,
        score_cast_func: Callable[[_StrType], Any] = ...,
        byscore: bool = False,
        bylex: bool = False,
        offset: int | None = None,
        num: int | None = None,
    ) -> Pipeline[_StrType]: ...
    def zrangebylex(  # type: ignore[override]
        self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None
    ) -> Pipeline[_StrType]: ...
    def zrangebyscore(  # type: ignore[override]
        self,
        name: _Key,
        min: _Value,
        max: _Value,
        start: int | None = None,
        num: int | None = None,
        withscores: bool = False,
        score_cast_func: Callable[[_StrType], Any] = ...,
    ) -> Pipeline[_StrType]: ...
    def zrank(self, name: _Key, value: _Value, withscore: bool = False) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def zrem(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def zremrangebyrank(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def zrevrange(  # type: ignore[override]
        self, name: _Key, start: int, end: int, withscores: bool = False, score_cast_func: Callable[[_StrType], Any] = ...
    ) -> Pipeline[_StrType]: ...
    def zrevrangebyscore(  # type: ignore[override]
        self,
        name: _Key,
        max: _Value,
        min: _Value,
        start: int | None = None,
        num: int | None = None,
        withscores: bool = False,
        score_cast_func: Callable[[_StrType], Any] = ...,
    ) -> Pipeline[_StrType]: ...
    def zrevrangebylex(  # type: ignore[override]
        self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None
    ) -> Pipeline[_StrType]: ...
    def zrevrank(self, name: _Key, value: _Value, withscore: bool = False) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def zscore(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def zunionstore(  # type: ignore[override]
        self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None
    ) -> Pipeline[_StrType]: ...
    def pfadd(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def pfcount(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def pfmerge(self, dest: _Key, *sources: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def hdel(self, name: _Key, *keys: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def hexists(self, name: _Key, key: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def hget(self, name: _Key, key: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def hgetall(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def hincrby(self, name: _Key, key: _Key, amount: int = 1) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def hincrbyfloat(self, name: _Key, key: _Key, amount: float = 1.0) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def hkeys(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def hlen(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    # Overloads: either (key, value) pairs, a mapping, or both may be supplied.
    @overload  # type: ignore[override]
    def hset(
        self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = None, items: Incomplete | None = None
    ) -> Pipeline[_StrType]: ...
    @overload
    def hset(
        self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value], items: Incomplete | None = None
    ) -> Pipeline[_StrType]: ...
    @overload
    def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value], items: Incomplete | None = None) -> Pipeline[_StrType]: ...
    def hsetnx(self, name: _Key, key: _Key, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def hvals(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def publish(self, channel: _Key, message: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
    def eval(self, script, numkeys, *keys_and_args) -> Pipeline[_StrType]: ...
    def evalsha(self, sha, numkeys, *keys_and_args) -> Pipeline[_StrType]: ...
    def script_exists(self, *args) -> Pipeline[_StrType]: ...
    def script_flush(self, sync_type: Incomplete | None = None) -> Pipeline[_StrType]: ...
    def script_kill(self) -> Pipeline[_StrType]: ...
    def script_load(self, script) -> Pipeline[_StrType]: ...
    def pubsub_channels(self, pattern: _Key = "*") -> Pipeline[_StrType]: ...  # type: ignore[override]
# type: ignore[override] + def pubsub_numsub(self, *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def pubsub_numpat(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def monitor(self) -> Monitor: ... + def cluster(self, cluster_arg: str, *args: Any) -> Pipeline[_StrType]: ... # type: ignore[override] + def client(self) -> Any: ... + +class Monitor: + command_re: Pattern[str] + monitor_re: Pattern[str] + def __init__(self, connection_pool) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + def next_command(self) -> dict[str, Any]: ... + def listen(self) -> Iterable[dict[str, Any]]: ... diff --git a/valkey/cluster.pyi b/valkey/cluster.pyi new file mode 100644 index 00000000..6758c05e --- /dev/null +++ b/valkey/cluster.pyi @@ -0,0 +1,265 @@ +from _typeshed import Incomplete, Unused +from collections.abc import Callable, Iterable, Sequence +from threading import Lock +from types import TracebackType +from typing import Any, ClassVar, Literal, NoReturn, Protocol +from typing_extensions import Self + +from redis.client import CaseInsensitiveDict, PubSub, Valkey, _ParseResponseOptions +from redis.commands import CommandsParser, ValkeyClusterCommands +from redis.commands.core import _StrType +from redis.connection import BaseParser, Connection, ConnectionPool, Encoder, _ConnectionPoolOptions, _Encodable +from redis.exceptions import MovedError, ValkeyError +from redis.retry import Retry +from redis.typing import EncodableT + +def get_node_name(host: str, port: str | int) -> str: ... +def get_connection(redis_node: Valkey[Any], *args, **options: _ConnectionPoolOptions) -> Connection: ... +def parse_scan_result(command: Unused, res, **options): ... +def parse_pubsub_numsub(command: Unused, res, **options: Unused): ... +def parse_cluster_slots(resp, **options) -> dict[tuple[int, int], dict[str, Any]]: ... +def parse_cluster_myshardid(resp: bytes, **options: Unused) -> str: ... 

# Node-role / configuration-key constants used throughout the cluster client.
PRIMARY: str
REPLICA: str
SLOT_ID: str
VALKEY_ALLOWED_KEYS: tuple[str, ...]
KWARGS_DISABLED_KEYS: tuple[str, ...]
PIPELINE_BLOCKED_COMMANDS: tuple[str, ...]

# Strips kwargs that are not valid for per-node Valkey connections.
def cleanup_kwargs(**kwargs: Any) -> dict[str, Any]: ...

# It uses `DefaultParser` in real life, but it is a dynamic base class.
class ClusterParser(BaseParser): ...

class AbstractValkeyCluster:
    # Shared class-level configuration: node-selection flags, per-command
    # routing flags, and response/result callback tables.
    ValkeyClusterRequestTTL: ClassVar[int]
    PRIMARIES: ClassVar[str]
    REPLICAS: ClassVar[str]
    ALL_NODES: ClassVar[str]
    RANDOM: ClassVar[str]
    DEFAULT_NODE: ClassVar[str]
    NODE_FLAGS: ClassVar[set[str]]
    COMMAND_FLAGS: ClassVar[dict[str, str]]
    CLUSTER_COMMANDS_RESPONSE_CALLBACKS: ClassVar[dict[str, Any]]
    RESULT_CALLBACKS: ClassVar[dict[str, Callable[[Incomplete, Incomplete], Incomplete]]]
    ERRORS_ALLOW_RETRY: ClassVar[tuple[type[ValkeyError], ...]]

class ValkeyCluster(AbstractValkeyCluster, ValkeyClusterCommands[_StrType]):
    # Cluster-aware client: routes each command to the right node via the
    # NodesManager and retries on redirection errors.
    user_on_connect_func: Callable[[Connection], object] | None
    encoder: Encoder
    cluster_error_retry_attempts: int
    command_flags: dict[str, str]
    node_flags: set[str]
    read_from_replicas: bool
    reinitialize_counter: int
    reinitialize_steps: int
    nodes_manager: NodesManager
    cluster_response_callbacks: CaseInsensitiveDict[str, Callable[..., Incomplete]]
    result_callbacks: CaseInsensitiveDict[str, Callable[[Incomplete, Incomplete], Incomplete]]
    commands_parser: CommandsParser
    def __init__(  # TODO: make @overloads, either `url` or `host:port` can be passed
        self,
        host: str | None = None,
        port: int | None = 6379,
        startup_nodes: list[ClusterNode] | None = None,
        cluster_error_retry_attempts: int = 3,
        retry: Retry | None = None,
        require_full_coverage: bool = False,
        reinitialize_steps: int = 5,
        read_from_replicas: bool = False,
        dynamic_startup_nodes: bool = True,
        url: str | None = None,
        address_remap: Callable[[str, int], tuple[str, int]] | None = None,
        **kwargs,
    ) -> None: ...
    def __enter__(self) -> Self: ...
    def __exit__(
        self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None
    ) -> None: ...
    def __del__(self) -> None: ...
    def disconnect_connection_pools(self) -> None: ...
    @classmethod
    def from_url(cls, url: str, **kwargs) -> Self: ...
    def on_connect(self, connection: Connection) -> None: ...
    def get_redis_connection(self, node: ClusterNode) -> Valkey[Any]: ...
    def get_node(
        self, host: str | None = None, port: str | int | None = None, node_name: str | None = None
    ) -> ClusterNode | None: ...
    def get_primaries(self) -> list[ClusterNode]: ...
    def get_replicas(self) -> list[ClusterNode]: ...
    def get_random_node(self) -> ClusterNode: ...
    def get_nodes(self) -> list[ClusterNode]: ...
    def get_node_from_key(self, key: _Encodable, replica: bool = False) -> ClusterNode | None: ...
    def get_default_node(self) -> ClusterNode | None: ...
    def set_default_node(self, node: ClusterNode | None) -> bool: ...
    def monitor(self, target_node: Incomplete | None = None): ...
    def pubsub(
        self, node: Incomplete | None = None, host: Incomplete | None = None, port: Incomplete | None = None, **kwargs
    ): ...
    def pipeline(self, transaction: Incomplete | None = None, shard_hint: Incomplete | None = None): ...
    def lock(
        self,
        name: str,
        timeout: float | None = None,
        sleep: float = 0.1,
        blocking: bool = True,
        blocking_timeout: float | None = None,
        lock_class: type[Incomplete] | None = None,
        thread_local: bool = True,
    ): ...
    def keyslot(self, key: _Encodable) -> int: ...
    def determine_slot(self, *args): ...
    def get_encoder(self) -> Encoder: ...
    def get_connection_kwargs(self) -> dict[str, Any]: ...
    def execute_command(self, *args, **kwargs): ...
    def close(self) -> None: ...

class ClusterNode:
    # A single cluster member: network address plus an optional cached
    # per-node Valkey connection.
    host: str
    port: int
    name: str
    server_type: str | None
    redis_connection: Valkey[Incomplete] | None
    def __init__(
        self, host: str, port: int, server_type: str | None = None, redis_connection: Valkey[Incomplete] | None = None
    ) -> None: ...
    def __eq__(self, obj: object) -> bool: ...
    def __del__(self) -> None: ...

class LoadBalancer:
    # Round-robin selection of replicas for a given primary.
    primary_to_idx: dict[str, int]
    start_index: int
    def __init__(self, start_index: int = 0) -> None: ...
    def get_server_index(self, primary: str, list_size: int) -> int: ...
    def reset(self) -> None: ...

class NodesManager:
    # Maintains the cluster topology: node and slot caches, startup nodes,
    # and creation of per-node connections.
    nodes_cache: dict[str, ClusterNode]
    slots_cache: dict[str, list[ClusterNode]]
    startup_nodes: dict[str, ClusterNode]
    default_node: ClusterNode | None
    from_url: bool
    connection_pool_class: type[ConnectionPool]
    connection_kwargs: dict[str, Incomplete]  # TODO: could be a TypedDict
    read_load_balancer: LoadBalancer
    address_remap: Callable[[str, int], tuple[str, int]] | None
    def __init__(
        self,
        startup_nodes: Iterable[ClusterNode],
        from_url: bool = False,
        require_full_coverage: bool = False,
        lock: Lock | None = None,
        dynamic_startup_nodes: bool = True,
        connection_pool_class: type[ConnectionPool] = ...,
        address_remap: Callable[[str, int], tuple[str, int]] | None = None,
        **kwargs,  # TODO: same type as connection_kwargs
    ) -> None: ...
    def get_node(
        self, host: str | None = None, port: int | str | None = None, node_name: str | None = None
    ) -> ClusterNode | None: ...
    def update_moved_exception(self, exception: MovedError) -> None: ...
    def get_node_from_slot(self, slot: str, read_from_replicas: bool = False, server_type: str | None = None) -> ClusterNode: ...
    def get_nodes_by_server_type(self, server_type: str) -> list[ClusterNode]: ...
    def populate_startup_nodes(self, nodes: Iterable[ClusterNode]) -> None: ...
    def check_slots_coverage(self, slots_cache: dict[str, list[ClusterNode]]) -> bool: ...
    def create_redis_connections(self, nodes: Iterable[ClusterNode]) -> None: ...
    def create_redis_node(self, host: str, port: int | str, **kwargs: Any) -> Valkey[Incomplete]: ...
    def initialize(self) -> None: ...
    def close(self) -> None: ...
    def reset(self) -> None: ...
    def remap_host_port(self, host: str, port: int) -> tuple[str, int]: ...

class ClusterPubSub(PubSub):
    # PubSub bound to one cluster node (explicit node, host:port, or random).
    node: ClusterNode | None
    cluster: ValkeyCluster[Any]
    def __init__(
        self,
        redis_cluster: ValkeyCluster[Any],
        node: ClusterNode | None = None,
        host: str | None = None,
        port: int | None = None,
        **kwargs,
    ) -> None: ...
    def set_pubsub_node(
        self, cluster: ValkeyCluster[Any], node: ClusterNode | None = None, host: str | None = None, port: int | None = None
    ) -> None: ...
    def get_pubsub_node(self) -> ClusterNode | None: ...
    def execute_command(self, *args, **kwargs) -> None: ...
    def get_redis_connection(self) -> Valkey[Any] | None: ...

class ClusterPipeline(ValkeyCluster[_StrType]):
    # Buffers commands and ships them per-node; several base-class operations
    # (eval, multi, watch, ...) are stubbed to raise because they are not
    # supported in cluster pipelines.
    command_stack: list[Incomplete]
    nodes_manager: Incomplete
    refresh_table_asap: bool
    result_callbacks: Incomplete
    startup_nodes: Incomplete
    read_from_replicas: bool
    command_flags: Incomplete
    cluster_response_callbacks: Incomplete
    cluster_error_retry_attempts: int
    reinitialize_counter: int
    reinitialize_steps: int
    encoder: Encoder
    commands_parser: Incomplete
    def __init__(
        self,
        nodes_manager,
        commands_parser,
        result_callbacks: Incomplete | None = None,
        cluster_response_callbacks: Incomplete | None = None,
        startup_nodes: Incomplete | None = None,
        read_from_replicas: bool = False,
        cluster_error_retry_attempts: int = 3,
        reinitialize_steps: int = 5,
        lock: Lock | None = None,
        **kwargs,
    ) -> None: ...
    def __len__(self) -> int: ...
    def __bool__(self) -> Literal[True]: ...
    def execute_command(self, *args, **kwargs): ...
    def pipeline_execute_command(self, *args, **options): ...
    def raise_first_error(self, stack) -> None: ...
    def annotate_exception(self, exception, number, command) -> None: ...
    def execute(self, raise_on_error: bool = True): ...
    scripts: set[Any]  # is only set in `reset()`
    watching: bool  # is only set in `reset()`
    explicit_transaction: bool  # is only set in `reset()`
    def reset(self) -> None: ...
    def send_cluster_commands(self, stack, raise_on_error: bool = True, allow_redirections: bool = True): ...
    def eval(self) -> None: ...
    def multi(self) -> None: ...
    def immediate_execute_command(self, *args, **options) -> None: ...
    def load_scripts(self) -> None: ...
    def watch(self, *names) -> None: ...
    def unwatch(self) -> None: ...
    def script_load_for_pipeline(self, *args, **kwargs) -> None: ...
    def delete(self, *names): ...

# Factory for stand-ins that raise for commands blocked in cluster pipelines.
def block_pipeline_command(name: str) -> Callable[..., NoReturn]: ...

class PipelineCommand:
    # One queued command plus its routing/result bookkeeping.
    args: Sequence[EncodableT]
    options: _ParseResponseOptions
    position: int | None
    result: Any | Exception | None
    node: Incomplete | None
    asking: bool
    def __init__(
        self, args: Sequence[EncodableT], options: _ParseResponseOptions | None = None, position: int | None = None
    ) -> None: ...

class _ParseResponseCallback(Protocol):
    def __call__(self, connection: Connection, command: EncodableT, /, **kwargs) -> Any: ...

class NodeCommands:
    # The batch of pipeline commands destined for a single node: written in
    # one pass, then read back in order.
    parse_response: _ParseResponseCallback
    connection_pool: ConnectionPool
    connection: Connection
    commands: list[PipelineCommand]
    def __init__(
        self, parse_response: _ParseResponseCallback, connection_pool: ConnectionPool, connection: Connection
    ) -> None: ...
    def append(self, c: PipelineCommand) -> None: ...
    def write(self) -> None: ...
    def read(self) -> None: ...
diff --git a/valkey/commands/__init__.pyi b/valkey/commands/__init__.pyi
new file mode 100644
index 00000000..1abccc40
--- /dev/null
+++ b/valkey/commands/__init__.pyi
@@ -0,0 +1,17 @@
# Re-exports of the command mixins; `X as X` marks each name as an explicit
# re-export for type checkers.
from .cluster import ValkeyClusterCommands as ValkeyClusterCommands
from .core import AsyncCoreCommands as AsyncCoreCommands, CoreCommands as CoreCommands
from .helpers import list_or_args as list_or_args
from .parser import CommandsParser as CommandsParser
from .valkeymodules import ValkeyModuleCommands as ValkeyModuleCommands
from .sentinel import AsyncSentinelCommands as AsyncSentinelCommands, SentinelCommands as SentinelCommands

__all__ = [
    "ValkeyClusterCommands",
    "CommandsParser",
    "AsyncCoreCommands",
    "CoreCommands",
    "list_or_args",
    "ValkeyModuleCommands",
    "AsyncSentinelCommands",
    "SentinelCommands",
]
diff --git a/valkey/commands/bf/__init__.pyi b/valkey/commands/bf/__init__.pyi
new file mode 100644
index 00000000..d5ef70ee
--- /dev/null
+++ b/valkey/commands/bf/__init__.pyi
@@ -0,0 +1,58 @@
from typing import Any

from .commands import *
from .info import BFInfo as BFInfo, CFInfo as CFInfo, CMSInfo as CMSInfo, TDigestInfo as TDigestInfo, TopKInfo as TopKInfo

class AbstractBloom:
    # Static helpers that append optional arguments to a command's parameter
    # list, shared by the probabilistic-datatype command mixins below.
    @staticmethod
    def append_items(params, items) -> None: ...
    @staticmethod
    def append_error(params, error) -> None: ...
    @staticmethod
    def append_capacity(params, capacity) -> None: ...
    @staticmethod
    def append_expansion(params, expansion) -> None: ...
    @staticmethod
    def append_no_scale(params, noScale) -> None: ...
    @staticmethod
    def append_weights(params, weights) -> None: ...
    @staticmethod
    def append_no_create(params, noCreate) -> None: ...
    @staticmethod
    def append_items_and_increments(params, items, increments) -> None: ...
    @staticmethod
    def append_values_and_weights(params, items, weights) -> None: ...
    @staticmethod
    def append_max_iterations(params, max_iterations) -> None: ...
    @staticmethod
    def append_bucket_size(params, bucket_size) -> None: ...

# Concrete clients: each pairs a command mixin with the shared helpers.
class CMSBloom(CMSCommands, AbstractBloom):
    client: Any
    commandmixin: Any
    execute_command: Any
    def __init__(self, client, **kwargs) -> None: ...

class TOPKBloom(TOPKCommands, AbstractBloom):
    client: Any
    commandmixin: Any
    execute_command: Any
    def __init__(self, client, **kwargs) -> None: ...

class CFBloom(CFCommands, AbstractBloom):
    client: Any
    commandmixin: Any
    execute_command: Any
    def __init__(self, client, **kwargs) -> None: ...

class TDigestBloom(TDigestCommands, AbstractBloom):
    client: Any
    commandmixin: Any
    execute_command: Any
    def __init__(self, client, **kwargs) -> None: ...

class BFBloom(BFCommands, AbstractBloom):
    client: Any
    commandmixin: Any
    execute_command: Any
    def __init__(self, client, **kwargs) -> None: ...
diff --git a/valkey/commands/bf/commands.pyi b/valkey/commands/bf/commands.pyi
new file mode 100644
index 00000000..99a296fd
--- /dev/null
+++ b/valkey/commands/bf/commands.pyi
@@ -0,0 +1,112 @@
from _typeshed import Incomplete

# Command-name constants for the Bloom/Cuckoo/Count-Min/Top-K/T-Digest modules.
BF_RESERVE: str
BF_ADD: str
BF_MADD: str
BF_INSERT: str
BF_EXISTS: str
BF_MEXISTS: str
BF_SCANDUMP: str
BF_LOADCHUNK: str
BF_INFO: str
CF_RESERVE: str
CF_ADD: str
CF_ADDNX: str
CF_INSERT: str
CF_INSERTNX: str
CF_EXISTS: str
CF_DEL: str
CF_COUNT: str
CF_SCANDUMP: str
CF_LOADCHUNK: str
CF_INFO: str
CMS_INITBYDIM: str
CMS_INITBYPROB: str
CMS_INCRBY: str
CMS_QUERY: str
CMS_MERGE: str
CMS_INFO: str
TOPK_RESERVE: str
TOPK_ADD: str
TOPK_INCRBY: str
TOPK_QUERY: str
TOPK_COUNT: str
TOPK_LIST: str
TOPK_INFO: str
TDIGEST_CREATE: str
TDIGEST_RESET: str
TDIGEST_ADD: str
TDIGEST_MERGE: str
TDIGEST_CDF: str
TDIGEST_QUANTILE: str
TDIGEST_MIN: str
TDIGEST_MAX: str
TDIGEST_INFO: str

class BFCommands:
    # Bloom-filter (BF.*) commands.
    def create(self, key, errorRate, capacity, expansion: Incomplete | None = None, noScale: Incomplete | None = None): ...
    def add(self, key, item): ...
    def madd(self, key, *items): ...
    def insert(
        self,
        key,
        items,
        capacity: Incomplete | None = None,
        error: Incomplete | None = None,
        noCreate: Incomplete | None = None,
        expansion: Incomplete | None = None,
        noScale: Incomplete | None = None,
    ): ...
    def exists(self, key, item): ...
    def mexists(self, key, *items): ...
    def scandump(self, key, iter): ...
    def loadchunk(self, key, iter, data): ...
    def info(self, key): ...

class CFCommands:
    # Cuckoo-filter (CF.*) commands.
    def create(
        self,
        key,
        capacity,
        expansion: Incomplete | None = None,
        bucket_size: Incomplete | None = None,
        max_iterations: Incomplete | None = None,
    ): ...
    def add(self, key, item): ...
    def addnx(self, key, item): ...
    def insert(self, key, items, capacity: Incomplete | None = None, nocreate: Incomplete | None = None): ...
    def insertnx(self, key, items, capacity: Incomplete | None = None, nocreate: Incomplete | None = None): ...
    def exists(self, key, item): ...
    def delete(self, key, item): ...
    def count(self, key, item): ...
    def scandump(self, key, iter): ...
    def loadchunk(self, key, iter, data): ...
    def info(self, key): ...

class TOPKCommands:
    # Top-K (TOPK.*) commands.
    def reserve(self, key, k, width, depth, decay): ...
    def add(self, key, *items): ...
    def incrby(self, key, items, increments): ...
    def query(self, key, *items): ...
    def count(self, key, *items): ...
    def list(self, key, withcount: bool = False): ...
    def info(self, key): ...

class TDigestCommands:
    # T-Digest (TDIGEST.*) commands.
    def create(self, key, compression: int = 100): ...
    def reset(self, key): ...
    def add(self, key, values): ...
    def merge(self, destination_key, num_keys, *keys, compression: int | None = None, override: bool = False): ...
    def min(self, key): ...
    def max(self, key): ...
    def quantile(self, key, quantile, *quantiles): ...
    def cdf(self, key, value, *values): ...
    def info(self, key): ...

class CMSCommands:
    # Count-Min-Sketch (CMS.*) commands.
    def initbydim(self, key, width, depth): ...
    def initbyprob(self, key, error, probability): ...
    def incrby(self, key, items, increments): ...
    def query(self, key, *items): ...
    # NOTE(review): `weights=[]` is a mutable default; harmless in a stub
    # (never executed) but typeshed convention prefers `weights=...` —
    # consider changing alongside the runtime signature.
    def merge(self, destKey, numKeys, srcKeys, weights=[]): ...
    def info(self, key): ...
diff --git a/valkey/commands/bf/info.pyi b/valkey/commands/bf/info.pyi
new file mode 100644
index 00000000..54d1cf04
--- /dev/null
+++ b/valkey/commands/bf/info.pyi
@@ -0,0 +1,43 @@
from typing import Any

# Response holders parsed from the modules' *.INFO replies; each constructor
# takes the raw reply array.
class BFInfo:
    capacity: Any
    size: Any
    filterNum: Any
    insertedNum: Any
    expansionRate: Any
    def __init__(self, args) -> None: ...

class CFInfo:
    size: Any
    bucketNum: Any
    filterNum: Any
    insertedNum: Any
    deletedNum: Any
    bucketSize: Any
    expansionRate: Any
    maxIteration: Any
    def __init__(self, args) -> None: ...

class CMSInfo:
    width: Any
    depth: Any
    count: Any
    def __init__(self, args) -> None: ...

class TopKInfo:
    k: Any
    width: Any
    depth: Any
    decay: Any
    def __init__(self, args) -> None: ...

class TDigestInfo:
    compression: Any
    capacity: Any
    mergedNodes: Any
    unmergedNodes: Any
    mergedWeight: Any
    unmergedWeight: Any
    totalCompressions: Any
    def __init__(self, args) -> None: ...
diff --git a/valkey/commands/cluster.pyi b/valkey/commands/cluster.pyi
new file mode 100644
index 00000000..2654a73f
--- /dev/null
+++ b/valkey/commands/cluster.pyi
@@ -0,0 +1,60 @@
from _typeshed import Incomplete
from typing import NoReturn

from .core import ACLCommands, DataAccessCommands, ManagementCommands, PubSubCommands, _StrType

class ClusterMultiKeyCommands:
    # Multi-key commands split per hash slot and executed non-atomically.
    def mget_nonatomic(self, keys, *args): ...
    def mset_nonatomic(self, mapping): ...
    def exists(self, *keys): ...
    def delete(self, *keys): ...
    def touch(self, *keys): ...
    def unlink(self, *keys): ...

class ClusterManagementCommands(ManagementCommands):
    # Management commands not applicable in cluster mode are overridden to
    # return None (stubs annotate that as `-> None`).
    def slaveof(self, *args, **kwargs) -> None: ...
    def replicaof(self, *args, **kwargs) -> None: ...
    def swapdb(self, *args, **kwargs) -> None: ...
+ +class ClusterDataAccessCommands(DataAccessCommands[_StrType]): + def stralgo( + self, + algo, + value1, + value2, + specific_argument: str = "strings", + len: bool = False, + idx: bool = False, + minmatchlen: Incomplete | None = None, + withmatchlen: bool = False, + **kwargs, + ): ... + +class ValkeyClusterCommands( + ClusterMultiKeyCommands, ClusterManagementCommands, ACLCommands[_StrType], PubSubCommands, ClusterDataAccessCommands[_StrType] +): + def cluster_addslots(self, target_node, *slots): ... + def cluster_countkeysinslot(self, slot_id): ... + def cluster_count_failure_report(self, node_id): ... + def cluster_delslots(self, *slots): ... + def cluster_failover(self, target_node, option: Incomplete | None = None): ... + def cluster_info(self, target_nodes: Incomplete | None = None): ... + def cluster_keyslot(self, key): ... + def cluster_meet(self, host, port, target_nodes: Incomplete | None = None): ... + def cluster_nodes(self): ... + def cluster_replicate(self, target_nodes, node_id): ... + def cluster_reset(self, soft: bool = True, target_nodes: Incomplete | None = None): ... + def cluster_save_config(self, target_nodes: Incomplete | None = None): ... + def cluster_get_keys_in_slot(self, slot, num_keys): ... + def cluster_set_config_epoch(self, epoch, target_nodes: Incomplete | None = None): ... + def cluster_setslot(self, target_node, node_id, slot_id, state): ... + def cluster_setslot_stable(self, slot_id): ... + def cluster_replicas(self, node_id, target_nodes: Incomplete | None = None): ... + def cluster_slots(self, target_nodes: Incomplete | None = None): ... + def cluster_myshardid(self, target_nodes: Incomplete | None = None): ... + def cluster_links(self, target_node): ... + def cluster_flushslots(self, target_nodes: Incomplete | None = None) -> NoReturn: ... + def cluster_bumpepoch(self, target_nodes: Incomplete | None = None) -> NoReturn: ... + read_from_replicas: bool + def readonly(self, target_nodes: Incomplete | None = None): ... 
+ def readwrite(self, target_nodes: Incomplete | None = None): ... diff --git a/valkey/commands/core.pyi b/valkey/commands/core.pyi new file mode 100644 index 00000000..770bf4ce --- /dev/null +++ b/valkey/commands/core.pyi @@ -0,0 +1,1743 @@ +import builtins +from _typeshed import Incomplete, SupportsItems +from collections.abc import AsyncIterator, Awaitable, Callable, Iterable, Iterator, Mapping, Sequence +from datetime import datetime, timedelta +from typing import Any, Generic, Literal, TypeVar, overload + +from ..asyncio.client import Valkey as AsyncValkey +from ..client import _CommandOptions, _Key, _Value +from ..typing import ChannelT, EncodableT, KeyT, PatternT, ScriptTextT, StreamIdT + +_ScoreCastFuncReturn = TypeVar("_ScoreCastFuncReturn") +_StrType = TypeVar("_StrType", bound=str | bytes) + +class ACLCommands(Generic[_StrType]): + def acl_cat(self, category: str | None = None, **kwargs: _CommandOptions) -> list[str]: ... + def acl_deluser(self, *username: str, **kwargs: _CommandOptions) -> int: ... + def acl_genpass(self, bits: int | None = None, **kwargs: _CommandOptions) -> str: ... + def acl_getuser(self, username: str, **kwargs: _CommandOptions) -> Any | None: ... + def acl_help(self, **kwargs: _CommandOptions): ... + def acl_list(self, **kwargs: _CommandOptions) -> list[str]: ... + def acl_log(self, count: int | None = None, **kwargs: _CommandOptions): ... + def acl_log_reset(self, **kwargs: _CommandOptions): ... + def acl_load(self, **kwargs: _CommandOptions) -> bool: ... + def acl_save(self, **kwargs: _CommandOptions): ... 
+ def acl_setuser( + self, + username: str, + enabled: bool = False, + nopass: bool = False, + passwords: Sequence[str] | None = None, + hashed_passwords: Sequence[str] | None = None, + categories: Sequence[str] | None = None, + commands: Sequence[str] | None = None, + keys: Sequence[str] | None = None, + channels: Iterable[ChannelT] | None = None, + selectors: Iterable[tuple[str, KeyT]] | None = None, + reset: bool = False, + reset_keys: bool = False, + reset_channels: bool = False, + reset_passwords: bool = False, + **kwargs: _CommandOptions, + ) -> bool: ... + def acl_users(self, **kwargs: _CommandOptions) -> list[str]: ... + def acl_whoami(self, **kwargs: _CommandOptions) -> str: ... + +class AsyncACLCommands(Generic[_StrType]): + async def acl_cat(self, category: str | None = None, **kwargs: _CommandOptions) -> list[str]: ... + async def acl_deluser(self, *username: str, **kwargs: _CommandOptions) -> int: ... + async def acl_genpass(self, bits: int | None = None, **kwargs: _CommandOptions) -> str: ... + async def acl_getuser(self, username: str, **kwargs: _CommandOptions) -> Any | None: ... + async def acl_help(self, **kwargs: _CommandOptions): ... + async def acl_list(self, **kwargs: _CommandOptions) -> list[str]: ... + async def acl_log(self, count: int | None = None, **kwargs: _CommandOptions): ... + async def acl_log_reset(self, **kwargs: _CommandOptions): ... + async def acl_load(self, **kwargs: _CommandOptions) -> bool: ... + async def acl_save(self, **kwargs: _CommandOptions): ... 
+ async def acl_setuser( + self, + username: str, + enabled: bool = False, + nopass: bool = False, + passwords: Sequence[str] | None = None, + hashed_passwords: Sequence[str] | None = None, + categories: Sequence[str] | None = None, + commands: Sequence[str] | None = None, + keys: Sequence[str] | None = None, + channels: Iterable[ChannelT] | None = None, + selectors: Iterable[tuple[str, KeyT]] | None = None, + reset: bool = False, + reset_keys: bool = False, + reset_channels: bool = False, + reset_passwords: bool = False, + **kwargs: _CommandOptions, + ) -> bool: ... + async def acl_users(self, **kwargs: _CommandOptions) -> list[str]: ... + async def acl_whoami(self, **kwargs: _CommandOptions) -> str: ... + +class ManagementCommands: + def bgrewriteaof(self, **kwargs: _CommandOptions): ... + def bgsave(self, schedule: bool = True, **kwargs: _CommandOptions): ... + def role(self): ... + def client_kill(self, address: str, **kwargs: _CommandOptions) -> bool: ... + def client_kill_filter( + self, + _id: Incomplete | None = None, + _type: Incomplete | None = None, + addr: Incomplete | None = None, + skipme: Incomplete | None = None, + laddr: Incomplete | None = None, + user: Incomplete | None = None, + **kwargs: _CommandOptions, + ): ... + def client_info(self, **kwargs: _CommandOptions): ... + def client_list( + self, _type: str | None = None, client_id: list[str] = [], **kwargs: _CommandOptions + ) -> list[dict[str, str]]: ... + def client_getname(self, **kwargs: _CommandOptions) -> str | None: ... + def client_getredir(self, **kwargs: _CommandOptions): ... + def client_reply(self, reply, **kwargs: _CommandOptions): ... + def client_id(self, **kwargs: _CommandOptions) -> int: ... + def client_tracking_on( + self, + clientid: Incomplete | None = None, + prefix=[], + bcast: bool = False, + optin: bool = False, + optout: bool = False, + noloop: bool = False, + ): ... 
+ def client_tracking_off( + self, + clientid: Incomplete | None = None, + prefix=[], + bcast: bool = False, + optin: bool = False, + optout: bool = False, + noloop: bool = False, + ): ... + def client_tracking( + self, + on: bool = True, + clientid: Incomplete | None = None, + prefix=[], + bcast: bool = False, + optin: bool = False, + optout: bool = False, + noloop: bool = False, + **kwargs: _CommandOptions, + ): ... + def client_trackinginfo(self, **kwargs: _CommandOptions): ... + def client_setname(self, name: str, **kwargs: _CommandOptions) -> bool: ... + def client_unblock(self, client_id, error: bool = False, **kwargs: _CommandOptions): ... + def client_pause(self, timeout, all: bool = True, **kwargs: _CommandOptions): ... + def client_unpause(self, **kwargs: _CommandOptions): ... + def client_no_evict(self, mode: str): ... + def client_no_touch(self, mode: str): ... + def command(self, **kwargs: _CommandOptions): ... + def command_info(self, **kwargs: _CommandOptions): ... + def command_count(self, **kwargs: _CommandOptions): ... + def config_get(self, pattern: PatternT = "*", *args: PatternT, **kwargs: _CommandOptions): ... + def config_set(self, name: KeyT, value: EncodableT, *args: KeyT | EncodableT, **kwargs: _CommandOptions): ... + def config_resetstat(self, **kwargs: _CommandOptions): ... + def config_rewrite(self, **kwargs: _CommandOptions): ... + def dbsize(self, **kwargs: _CommandOptions) -> int: ... + def debug_object(self, key, **kwargs: _CommandOptions): ... + def debug_segfault(self, **kwargs: _CommandOptions): ... + def echo(self, value: _Value, **kwargs: _CommandOptions) -> bytes: ... + def flushall(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> bool: ... + def flushdb(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> bool: ... + def sync(self): ... + def psync(self, replicationid, offset): ... + def swapdb(self, first, second, **kwargs: _CommandOptions): ... 
+ def select(self, index, **kwargs: _CommandOptions): ... + def info(self, section: _Key | None = None, *args: _Key, **kwargs: _CommandOptions) -> Mapping[str, Any]: ... + def lastsave(self, **kwargs: _CommandOptions): ... + def lolwut(self, *version_numbers: _Value, **kwargs: _CommandOptions) -> bytes: ... + def reset(self) -> None: ... + def migrate( + self, + host, + port, + keys, + destination_db, + timeout, + copy: bool = False, + replace: bool = False, + auth: Incomplete | None = None, + **kwargs: _CommandOptions, + ): ... + def object(self, infotype, key, **kwargs: _CommandOptions): ... + def memory_doctor(self, **kwargs: _CommandOptions): ... + def memory_help(self, **kwargs: _CommandOptions): ... + def memory_stats(self, **kwargs: _CommandOptions) -> dict[str, Any]: ... + def memory_malloc_stats(self, **kwargs: _CommandOptions): ... + def memory_usage(self, key, samples: Incomplete | None = None, **kwargs: _CommandOptions): ... + def memory_purge(self, **kwargs: _CommandOptions): ... + def ping(self, **kwargs: _CommandOptions) -> bool: ... + def quit(self, **kwargs: _CommandOptions): ... + def replicaof(self, *args, **kwargs: _CommandOptions): ... + def save(self, **kwargs: _CommandOptions) -> bool: ... + def shutdown( + self, + save: bool = False, + nosave: bool = False, + now: bool = False, + force: bool = False, + abort: bool = False, + **kwargs: _CommandOptions, + ) -> None: ... + def slaveof(self, host: Incomplete | None = None, port: Incomplete | None = None, **kwargs: _CommandOptions): ... + def slowlog_get(self, num: Incomplete | None = None, **kwargs: _CommandOptions): ... + def slowlog_len(self, **kwargs: _CommandOptions): ... + def slowlog_reset(self, **kwargs: _CommandOptions): ... + def time(self, **kwargs: _CommandOptions): ... + def wait(self, num_replicas, timeout, **kwargs: _CommandOptions): ... + +class AsyncManagementCommands: + async def bgrewriteaof(self, **kwargs: _CommandOptions): ... 
+ async def bgsave(self, schedule: bool = True, **kwargs: _CommandOptions): ... + async def role(self): ... + async def client_kill(self, address: str, **kwargs: _CommandOptions) -> bool: ... + async def client_kill_filter( + self, + _id: Incomplete | None = None, + _type: Incomplete | None = None, + addr: Incomplete | None = None, + skipme: Incomplete | None = None, + laddr: Incomplete | None = None, + user: Incomplete | None = None, + **kwargs: _CommandOptions, + ): ... + async def client_info(self, **kwargs: _CommandOptions): ... + async def client_list( + self, _type: str | None = None, client_id: list[str] = [], **kwargs: _CommandOptions + ) -> list[dict[str, str]]: ... + async def client_getname(self, **kwargs: _CommandOptions) -> str | None: ... + async def client_getredir(self, **kwargs: _CommandOptions): ... + async def client_reply(self, reply, **kwargs: _CommandOptions): ... + async def client_id(self, **kwargs: _CommandOptions) -> int: ... + async def client_tracking_on( + self, + clientid: Incomplete | None = None, + prefix=[], + bcast: bool = False, + optin: bool = False, + optout: bool = False, + noloop: bool = False, + ): ... + async def client_tracking_off( + self, + clientid: Incomplete | None = None, + prefix=[], + bcast: bool = False, + optin: bool = False, + optout: bool = False, + noloop: bool = False, + ): ... + async def client_tracking( + self, + on: bool = True, + clientid: Incomplete | None = None, + prefix=[], + bcast: bool = False, + optin: bool = False, + optout: bool = False, + noloop: bool = False, + **kwargs: _CommandOptions, + ): ... + async def client_trackinginfo(self, **kwargs: _CommandOptions): ... + async def client_setname(self, name: str, **kwargs: _CommandOptions) -> bool: ... + async def client_unblock(self, client_id, error: bool = False, **kwargs: _CommandOptions): ... + async def client_pause(self, timeout, all: bool = True, **kwargs: _CommandOptions): ... + async def client_unpause(self, **kwargs: _CommandOptions): ... 
+ async def command(self, **kwargs: _CommandOptions): ... + async def command_info(self, **kwargs: _CommandOptions): ... + async def command_count(self, **kwargs: _CommandOptions): ... + async def config_get(self, pattern: PatternT = "*", *args: PatternT, **kwargs: _CommandOptions): ... + async def config_set(self, name: KeyT, value: EncodableT, *args: KeyT | EncodableT, **kwargs: _CommandOptions): ... + async def config_resetstat(self, **kwargs: _CommandOptions): ... + async def config_rewrite(self, **kwargs: _CommandOptions): ... + async def dbsize(self, **kwargs: _CommandOptions) -> int: ... + async def debug_object(self, key, **kwargs: _CommandOptions): ... + async def debug_segfault(self, **kwargs: _CommandOptions): ... + async def echo(self, value: _Value, **kwargs: _CommandOptions) -> bytes: ... + async def flushall(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> bool: ... + async def flushdb(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> bool: ... + async def sync(self): ... + async def psync(self, replicationid, offset): ... + async def swapdb(self, first, second, **kwargs: _CommandOptions): ... + async def select(self, index, **kwargs: _CommandOptions): ... + async def info(self, section: _Key | None = None, *args: _Key, **kwargs: _CommandOptions) -> Mapping[str, Any]: ... + async def lastsave(self, **kwargs: _CommandOptions): ... + async def lolwut(self, *version_numbers: _Value, **kwargs: _CommandOptions) -> bytes: ... + async def reset(self) -> None: ... + async def migrate( + self, + host, + port, + keys, + destination_db, + timeout, + copy: bool = False, + replace: bool = False, + auth: Incomplete | None = None, + **kwargs: _CommandOptions, + ): ... + async def object(self, infotype, key, **kwargs: _CommandOptions): ... + async def memory_doctor(self, **kwargs: _CommandOptions): ... + async def memory_help(self, **kwargs: _CommandOptions): ... 
+ async def memory_stats(self, **kwargs: _CommandOptions) -> dict[str, Any]: ... + async def memory_malloc_stats(self, **kwargs: _CommandOptions): ... + async def memory_usage(self, key, samples: Incomplete | None = None, **kwargs: _CommandOptions): ... + async def memory_purge(self, **kwargs: _CommandOptions): ... + async def ping(self, **kwargs: _CommandOptions) -> bool: ... + async def quit(self, **kwargs: _CommandOptions): ... + async def replicaof(self, *args, **kwargs: _CommandOptions): ... + async def save(self, **kwargs: _CommandOptions) -> bool: ... + async def shutdown( + self, + save: bool = False, + nosave: bool = False, + now: bool = False, + force: bool = False, + abort: bool = False, + **kwargs: _CommandOptions, + ) -> None: ... + async def slaveof(self, host: Incomplete | None = None, port: Incomplete | None = None, **kwargs: _CommandOptions): ... + async def slowlog_get(self, num: Incomplete | None = None, **kwargs: _CommandOptions): ... + async def slowlog_len(self, **kwargs: _CommandOptions): ... + async def slowlog_reset(self, **kwargs: _CommandOptions): ... + async def time(self, **kwargs: _CommandOptions): ... + async def wait(self, num_replicas, timeout, **kwargs: _CommandOptions): ... + +class BasicKeyCommands(Generic[_StrType]): + def append(self, key, value): ... + def bitcount(self, key: _Key, start: int | None = None, end: int | None = None, mode: str | None = None) -> int: ... + def bitfield(self, key, default_overflow: Incomplete | None = None): ... + def bitop(self, operation, dest, *keys): ... + def bitpos(self, key: _Key, bit: int, start: int | None = None, end: int | None = None, mode: str | None = None): ... + def copy(self, source, destination, destination_db: Incomplete | None = None, replace: bool = False): ... + def decr(self, name, amount: int = 1) -> int: ... + def decrby(self, name, amount: int = 1) -> int: ... + def delete(self, *names: _Key) -> int: ... + def __delitem__(self, name: _Key) -> None: ... 
+ def dump(self, name: _Key) -> _StrType | None: ... + def exists(self, *names: _Key) -> int: ... + __contains__ = exists + def expire( + self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False + ) -> bool: ... + def expireat(self, name, when, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False): ... + def get(self, name: _Key) -> _StrType | None: ... + def getdel(self, name: _Key) -> _StrType | None: ... + def getex( + self, + name, + ex: Incomplete | None = None, + px: Incomplete | None = None, + exat: Incomplete | None = None, + pxat: Incomplete | None = None, + persist: bool = False, + ): ... + def __getitem__(self, name: str): ... + def getbit(self, name: _Key, offset: int) -> int: ... + def getrange(self, key, start, end): ... + def getset(self, name, value) -> _StrType | None: ... + def incr(self, name: _Key, amount: int = 1) -> int: ... + def incrby(self, name: _Key, amount: int = 1) -> int: ... + def incrbyfloat(self, name: _Key, amount: float = 1.0) -> float: ... + def keys(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> list[_StrType]: ... + def lmove( + self, + first_list: _Key, + second_list: _Key, + src: Literal["LEFT", "RIGHT"] = "LEFT", + dest: Literal["LEFT", "RIGHT"] = "RIGHT", + ) -> _Value: ... + def blmove( + self, + first_list: _Key, + second_list: _Key, + timeout: float, + src: Literal["LEFT", "RIGHT"] = "LEFT", + dest: Literal["LEFT", "RIGHT"] = "RIGHT", + ) -> _Value | None: ... + def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ... + def mset(self, mapping: Mapping[_Key, _Value]) -> Literal[True]: ... + def msetnx(self, mapping: Mapping[_Key, _Value]) -> bool: ... + def move(self, name: _Key, db: int) -> bool: ... + def persist(self, name: _Key) -> bool: ... + def pexpire( + self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False + ) -> Literal[1, 0]: ... 
+ def pexpireat( + self, name: _Key, when: int | datetime, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False + ) -> Literal[1, 0]: ... + def psetex(self, name, time_ms, value): ... + def pttl(self, name: _Key) -> int: ... + def hrandfield(self, key, count: Incomplete | None = None, withvalues: bool = False): ... + def randomkey(self, **kwargs: _CommandOptions): ... + def rename(self, src, dst): ... + def renamenx(self, src, dst): ... + def restore( + self, + name, + ttl, + value, + replace: bool = False, + absttl: bool = False, + idletime: Incomplete | None = None, + frequency: Incomplete | None = None, + ): ... + def set( + self, + name: _Key, + value: _Value, + ex: None | float | timedelta = None, + px: None | float | timedelta = None, + nx: bool = False, + xx: bool = False, + keepttl: bool = False, + get: bool = False, + exat: Incomplete | None = None, + pxat: Incomplete | None = None, + ) -> bool | None: ... + def __setitem__(self, name, value) -> None: ... + def setbit(self, name: _Key, offset: int, value: int) -> int: ... + def setex(self, name: _Key, time: int | timedelta, value: _Value) -> bool: ... + def setnx(self, name: _Key, value: _Value) -> bool: ... + def setrange(self, name, offset, value): ... + def stralgo( + self, + algo, + value1, + value2, + specific_argument: str = "strings", + len: bool = False, + idx: bool = False, + minmatchlen: Incomplete | None = None, + withmatchlen: bool = False, + **kwargs: _CommandOptions, + ): ... + def strlen(self, name): ... + def substr(self, name, start, end: int = -1): ... + def touch(self, *args): ... + def ttl(self, name: _Key) -> int: ... + def type(self, name): ... + def watch(self, *names): ... + def unwatch(self): ... + def unlink(self, *names: _Key) -> int: ... + +class AsyncBasicKeyCommands(Generic[_StrType]): + async def append(self, key, value): ... + async def bitcount(self, key: _Key, start: int | None = None, end: int | None = None, mode: str | None = None) -> int: ... 
+ async def bitfield(self, key, default_overflow: Incomplete | None = None): ... + async def bitop(self, operation, dest, *keys): ... + async def bitpos(self, key: _Key, bit: int, start: int | None = None, end: int | None = None, mode: str | None = None): ... + async def copy(self, source, destination, destination_db: Incomplete | None = None, replace: bool = False): ... + async def decr(self, name, amount: int = 1) -> int: ... + async def decrby(self, name, amount: int = 1) -> int: ... + async def delete(self, *names: _Key) -> int: ... + async def dump(self, name: _Key) -> _StrType | None: ... + async def exists(self, *names: _Key) -> int: ... + async def expire( + self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False + ) -> bool: ... + async def expireat(self, name, when, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False): ... + async def get(self, name: _Key) -> _StrType | None: ... + async def getdel(self, name: _Key) -> _StrType | None: ... + async def getex( + self, + name, + ex: Incomplete | None = None, + px: Incomplete | None = None, + exat: Incomplete | None = None, + pxat: Incomplete | None = None, + persist: bool = False, + ): ... + async def getbit(self, name: _Key, offset: int) -> int: ... + async def getrange(self, key, start, end): ... + async def getset(self, name, value) -> _StrType | None: ... + async def incr(self, name: _Key, amount: int = 1) -> int: ... + async def incrby(self, name: _Key, amount: int = 1) -> int: ... + async def incrbyfloat(self, name: _Key, amount: float = 1.0) -> float: ... + async def keys(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> list[_StrType]: ... + async def lmove( + self, + first_list: _Key, + second_list: _Key, + src: Literal["LEFT", "RIGHT"] = "LEFT", + dest: Literal["LEFT", "RIGHT"] = "RIGHT", + ) -> _Value: ... 
+ async def blmove( + self, + first_list: _Key, + second_list: _Key, + timeout: float, + src: Literal["LEFT", "RIGHT"] = "LEFT", + dest: Literal["LEFT", "RIGHT"] = "RIGHT", + ) -> _Value | None: ... + async def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ... + async def mset(self, mapping: Mapping[_Key, _Value]) -> Literal[True]: ... + async def msetnx(self, mapping: Mapping[_Key, _Value]) -> bool: ... + async def move(self, name: _Key, db: int) -> bool: ... + async def persist(self, name: _Key) -> bool: ... + async def pexpire( + self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False + ) -> Literal[1, 0]: ... + async def pexpireat( + self, name: _Key, when: int | datetime, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False + ) -> Literal[1, 0]: ... + async def psetex(self, name, time_ms, value): ... + async def pttl(self, name: _Key) -> int: ... + async def hrandfield(self, key, count: Incomplete | None = None, withvalues: bool = False): ... + async def randomkey(self, **kwargs: _CommandOptions): ... + async def rename(self, src, dst): ... + async def renamenx(self, src, dst): ... + async def restore( + self, + name, + ttl, + value, + replace: bool = False, + absttl: bool = False, + idletime: Incomplete | None = None, + frequency: Incomplete | None = None, + ): ... + async def set( + self, + name: _Key, + value: _Value, + ex: None | float | timedelta = None, + px: None | float | timedelta = None, + nx: bool = False, + xx: bool = False, + keepttl: bool = False, + get: bool = False, + exat: Incomplete | None = None, + pxat: Incomplete | None = None, + ) -> bool | None: ... + async def setbit(self, name: _Key, offset: int, value: int) -> int: ... + async def setex(self, name: _Key, time: int | timedelta, value: _Value) -> bool: ... + async def setnx(self, name: _Key, value: _Value) -> bool: ... + async def setrange(self, name, offset, value): ... 
+ async def stralgo( + self, + algo, + value1, + value2, + specific_argument: str = "strings", + len: bool = False, + idx: bool = False, + minmatchlen: Incomplete | None = None, + withmatchlen: bool = False, + **kwargs: _CommandOptions, + ): ... + async def strlen(self, name): ... + async def substr(self, name, start, end: int = -1): ... + async def touch(self, *args): ... + async def ttl(self, name: _Key) -> int: ... + async def type(self, name): ... + async def watch(self, *names): ... + async def unwatch(self): ... + async def unlink(self, *names: _Key) -> int: ... + def __getitem__(self, name: str): ... + def __setitem__(self, name, value) -> None: ... + def __delitem__(self, name: _Key) -> None: ... + def __contains__(self, name: _Key) -> None: ... + +class ListCommands(Generic[_StrType]): + @overload + def blpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> tuple[_StrType, _StrType]: ... + @overload + def blpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ... + @overload + def brpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> tuple[_StrType, _StrType]: ... + @overload + def brpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ... + def brpoplpush(self, src, dst, timeout: int | None = 0): ... + def lindex(self, name: _Key, index: int) -> _StrType | None: ... + def linsert( + self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value + ) -> int: ... + def llen(self, name: _Key) -> int: ... + def lpop(self, name, count: int | None = None): ... + def lpush(self, name: _Value, *values: _Value) -> int: ... + def lpushx(self, name, value): ... + def lrange(self, name: _Key, start: int, end: int) -> list[_StrType]: ... + def lrem(self, name: _Key, count: int, value: _Value) -> int: ... + def lset(self, name: _Key, index: int, value: _Value) -> bool: ... 
+ def ltrim(self, name: _Key, start: int, end: int) -> bool: ... + def rpop(self, name, count: int | None = None): ... + def rpoplpush(self, src, dst): ... + def rpush(self, name: _Value, *values: _Value) -> int: ... + def rpushx(self, name, value): ... + def lpos( + self, name, value, rank: Incomplete | None = None, count: Incomplete | None = None, maxlen: Incomplete | None = None + ): ... + @overload + def sort( + self, + name: _Key, + start: int | None = None, + num: int | None = None, + by: _Key | None = None, + get: _Key | Sequence[_Key] | None = None, + desc: bool = False, + alpha: bool = False, + store: None = None, + groups: bool = False, + ) -> list[_StrType]: ... + @overload + def sort( + self, + name: _Key, + start: int | None = None, + num: int | None = None, + by: _Key | None = None, + get: _Key | Sequence[_Key] | None = None, + desc: bool = False, + alpha: bool = False, + *, + store: _Key, + groups: bool = False, + ) -> int: ... + @overload + def sort( + self, + name: _Key, + start: int | None, + num: int | None, + by: _Key | None, + get: _Key | Sequence[_Key] | None, + desc: bool, + alpha: bool, + store: _Key, + groups: bool = False, + ) -> int: ... + +class AsyncListCommands(Generic[_StrType]): + @overload + async def blpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> tuple[_StrType, _StrType]: ... + @overload + async def blpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ... + @overload + async def brpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> tuple[_StrType, _StrType]: ... + @overload + async def brpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ... + async def brpoplpush(self, src, dst, timeout: int | None = 0): ... + async def lindex(self, name: _Key, index: int) -> _StrType | None: ... 
+ async def linsert( + self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value + ) -> int: ... + async def llen(self, name: _Key) -> int: ... + async def lpop(self, name, count: int | None = None): ... + async def lpush(self, name: _Value, *values: _Value) -> int: ... + async def lpushx(self, name, value): ... + async def lrange(self, name: _Key, start: int, end: int) -> list[_StrType]: ... + async def lrem(self, name: _Key, count: int, value: _Value) -> int: ... + async def lset(self, name: _Key, index: int, value: _Value) -> bool: ... + async def ltrim(self, name: _Key, start: int, end: int) -> bool: ... + async def rpop(self, name, count: int | None = None): ... + async def rpoplpush(self, src, dst): ... + async def rpush(self, name: _Value, *values: _Value) -> int: ... + async def rpushx(self, name, value): ... + async def lpos( + self, name, value, rank: Incomplete | None = None, count: Incomplete | None = None, maxlen: Incomplete | None = None + ): ... + @overload + async def sort( + self, + name: _Key, + start: int | None = None, + num: int | None = None, + by: _Key | None = None, + get: _Key | Sequence[_Key] | None = None, + desc: bool = False, + alpha: bool = False, + store: None = None, + groups: bool = False, + ) -> list[_StrType]: ... + @overload + async def sort( + self, + name: _Key, + start: int | None = None, + num: int | None = None, + by: _Key | None = None, + get: _Key | Sequence[_Key] | None = None, + desc: bool = False, + alpha: bool = False, + *, + store: _Key, + groups: bool = False, + ) -> int: ... + @overload + async def sort( + self, + name: _Key, + start: int | None, + num: int | None, + by: _Key | None, + get: _Key | Sequence[_Key] | None, + desc: bool, + alpha: bool, + store: _Key, + groups: bool = False, + ) -> int: ... 

# Cursor-based SCAN-family commands (SCAN/SSCAN/HSCAN/ZSCAN) plus the
# *_iter convenience wrappers that drive the cursor internally.
class ScanCommands(Generic[_StrType]):
    def scan(
        self,
        cursor: int = 0,
        match: _Key | None = None,
        count: int | None = None,
        _type: str | None = None,
        **kwargs: _CommandOptions,
    ) -> tuple[int, list[_StrType]]: ...
    def scan_iter(
        self, match: _Key | None = None, count: int | None = None, _type: str | None = None, **kwargs: _CommandOptions
    ) -> Iterator[_StrType]: ...
    def sscan(
        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None
    ) -> tuple[int, list[_StrType]]: ...
    def sscan_iter(self, name: _Key, match: _Key | None = None, count: int | None = None) -> Iterator[_StrType]: ...
    def hscan(
        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None
    ) -> tuple[int, dict[_StrType, _StrType]]: ...
    def hscan_iter(
        self, name: _Key, match: _Key | None = None, count: int | None = None
    ) -> Iterator[tuple[_StrType, _StrType]]: ...
    # zscan overloads: the element type of the score in the result follows
    # score_cast_func (float by default).
    @overload
    def zscan(
        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None
    ) -> tuple[int, list[tuple[_StrType, float]]]: ...
    @overload
    def zscan(
        self,
        name: _Key,
        cursor: int = 0,
        match: _Key | None = None,
        count: int | None = None,
        *,
        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
    ) -> tuple[int, list[tuple[_StrType, _ScoreCastFuncReturn]]]: ...
    @overload
    def zscan(
        self,
        name: _Key,
        cursor: int,
        match: _Key | None,
        count: int | None,
        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
    ) -> tuple[int, list[tuple[_StrType, _ScoreCastFuncReturn]]]: ...
    @overload
    def zscan_iter(self, name: _Key, match: _Key | None = None, count: int | None = None) -> Iterator[tuple[_StrType, float]]: ...
    @overload
    def zscan_iter(
        self,
        name: _Key,
        match: _Key | None = None,
        count: int | None = None,
        *,
        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
    ) -> Iterator[tuple[_StrType, _ScoreCastFuncReturn]]: ...
    @overload
    def zscan_iter(
        self, name: _Key, match: _Key | None, count: int | None, score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn]
    ) -> Iterator[tuple[_StrType, _ScoreCastFuncReturn]]: ...

# Async SCAN-family stubs: the one-shot commands are awaitable while the
# *_iter helpers are plain methods returning AsyncIterator.
class AsyncScanCommands(Generic[_StrType]):
    async def scan(
        self,
        cursor: int = 0,
        match: _Key | None = None,
        count: int | None = None,
        _type: str | None = None,
        **kwargs: _CommandOptions,
    ) -> tuple[int, list[_StrType]]: ...
    def scan_iter(
        self, match: _Key | None = None, count: int | None = None, _type: str | None = None, **kwargs: _CommandOptions
    ) -> AsyncIterator[_StrType]: ...
    async def sscan(
        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None
    ) -> tuple[int, list[_StrType]]: ...
    def sscan_iter(self, name: _Key, match: _Key | None = None, count: int | None = None) -> AsyncIterator[_StrType]: ...
    async def hscan(
        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None
    ) -> tuple[int, dict[_StrType, _StrType]]: ...
    def hscan_iter(
        self, name: _Key, match: _Key | None = None, count: int | None = None
    ) -> AsyncIterator[tuple[_StrType, _StrType]]: ...
    @overload
    async def zscan(
        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None
    ) -> tuple[int, list[tuple[_StrType, float]]]: ...
    @overload
    async def zscan(
        self,
        name: _Key,
        cursor: int = 0,
        match: _Key | None = None,
        count: int | None = None,
        *,
        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
    ) -> tuple[int, list[tuple[_StrType, _ScoreCastFuncReturn]]]: ...
    @overload
    async def zscan(
        self,
        name: _Key,
        cursor: int,
        match: _Key | None,
        count: int | None,
        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
    ) -> tuple[int, list[tuple[_StrType, _ScoreCastFuncReturn]]]: ...
    @overload
    def zscan_iter(
        self, name: _Key, match: _Key | None = None, count: int | None = None
    ) -> AsyncIterator[tuple[_StrType, float]]: ...
    @overload
    def zscan_iter(
        self,
        name: _Key,
        match: _Key | None = None,
        count: int | None = None,
        *,
        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
    ) -> AsyncIterator[tuple[_StrType, _ScoreCastFuncReturn]]: ...
    @overload
    def zscan_iter(
        self, name: _Key, match: _Key | None, count: int | None, score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn]
    ) -> AsyncIterator[tuple[_StrType, _ScoreCastFuncReturn]]: ...

# Set commands (SADD/SMEMBERS/...); spop/srandmember overloads distinguish
# the single-element (count=None) and list (count: int) forms.
class SetCommands(Generic[_StrType]):
    def sadd(self, name: _Key, *values: _Value) -> int: ...
    def scard(self, name: _Key) -> int: ...
    def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
    def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...
    def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
    def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...
    def sismember(self, name: _Key, value: _Value) -> bool: ...
    def smembers(self, name: _Key) -> builtins.set[_StrType]: ...
    def smismember(self, name, values, *args): ...
    def smove(self, src: _Key, dst: _Key, value: _Value) -> bool: ...
    @overload
    def spop(self, name: _Key, count: None = None) -> _Value | None: ...
    @overload
    def spop(self, name: _Key, count: int) -> list[_Value]: ...
    @overload
    def srandmember(self, name: _Key, number: None = None) -> _Value | None: ...
    @overload
    def srandmember(self, name: _Key, number: int) -> list[_Value]: ...
    def srem(self, name: _Key, *values: _Value) -> int: ...
    def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
    def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...

# Async counterpart of the set-commands stubs.
class AsyncSetCommands(Generic[_StrType]):
    async def sadd(self, name: _Key, *values: _Value) -> int: ...
    async def scard(self, name: _Key) -> int: ...
    async def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
    async def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...
    async def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
    async def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...
    async def sismember(self, name: _Key, value: _Value) -> bool: ...
    async def smembers(self, name: _Key) -> builtins.set[_StrType]: ...
    async def smismember(self, name, values, *args): ...
    async def smove(self, src: _Key, dst: _Key, value: _Value) -> bool: ...
    @overload
    async def spop(self, name: _Key, count: None = None) -> _Value | None: ...
    @overload
    async def spop(self, name: _Key, count: int) -> list[_Value]: ...
    @overload
    async def srandmember(self, name: _Key, number: None = None) -> _Value | None: ...
    @overload
    async def srandmember(self, name: _Key, number: int) -> list[_Value]: ...
    async def srem(self, name: _Key, *values: _Value) -> int: ...
    async def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
    async def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...

# Stream (X*) commands; mostly untyped pending fuller annotations.
class StreamCommands:
    def xack(self, name, groupname, *ids): ...
    def xadd(
        self,
        name: KeyT,
        # Only accepts dict objects, but for variance reasons we use a looser annotation
        fields: SupportsItems[bytes | memoryview | str | float, Any],
        id: str | int | bytes | memoryview = "*",
        maxlen=None,
        approximate: bool = True,
        nomkstream: bool = False,
        minid: Incomplete | None = None,
        limit: Incomplete | None = None,
    ): ...
    def xautoclaim(
        self,
        name,
        groupname,
        consumername,
        min_idle_time,
        start_id: StreamIdT = "0-0",
        count: Incomplete | None = None,
        justid: bool = False,
    ): ...
    def xclaim(
        self,
        name,
        groupname,
        consumername,
        min_idle_time,
        message_ids,
        idle=None,
        time=None,
        retrycount=None,
        force=False,
        justid=False,
    ): ...
    def xdel(self, name: KeyT, *ids: str | int | bytes | memoryview): ...
    def xgroup_create(self, name, groupname, id: str = "$", mkstream: bool = False, entries_read: int | None = None): ...
    def xgroup_delconsumer(self, name, groupname, consumername): ...
    def xgroup_destroy(self, name, groupname): ...
    def xgroup_createconsumer(self, name, groupname, consumername): ...
    def xgroup_setid(self, name, groupname, id, entries_read: int | None = None): ...
    def xinfo_consumers(self, name, groupname): ...
    def xinfo_groups(self, name): ...
    def xinfo_stream(self, name, full: bool = False): ...
    def xlen(self, name: _Key) -> int: ...
    def xpending(self, name, groupname): ...
    def xpending_range(
        self, name: _Key, groupname, min, max, count: int, consumername: Incomplete | None = None, idle: int | None = None
    ): ...
    def xrange(self, name, min: str = "-", max: str = "+", count: Incomplete | None = None): ...
    def xread(self, streams, count: Incomplete | None = None, block: Incomplete | None = None): ...
    def xreadgroup(
        self,
        groupname,
        consumername,
        streams,
        count: Incomplete | None = None,
        block: Incomplete | None = None,
        noack: bool = False,
    ): ...
    def xrevrange(self, name, max: str = "+", min: str = "-", count: Incomplete | None = None): ...
    def xtrim(
        self, name, maxlen: int | None = None, approximate: bool = True, minid: Incomplete | None = None, limit: int | None = None
    ): ...

# Async counterpart of the stream-commands stubs.
class AsyncStreamCommands:
    async def xack(self, name, groupname, *ids): ...
    async def xadd(
        self,
        name: KeyT,
        # Only accepts dict objects, but for variance reasons we use a looser annotation
        fields: SupportsItems[bytes | memoryview | str | float, Any],
        id: str | int | bytes | memoryview = "*",
        maxlen=None,
        approximate: bool = True,
        nomkstream: bool = False,
        minid: Incomplete | None = None,
        limit: Incomplete | None = None,
    ): ...
    async def xautoclaim(
        self,
        name,
        groupname,
        consumername,
        min_idle_time,
        start_id: StreamIdT = "0-0",
        count: Incomplete | None = None,
        justid: bool = False,
    ): ...
    async def xclaim(
        self,
        name,
        groupname,
        consumername,
        min_idle_time,
        message_ids,
        idle=None,
        time=None,
        retrycount=None,
        force=False,
        justid=False,
    ): ...
    async def xdel(self, name: KeyT, *ids: str | int | bytes | memoryview): ...
    async def xgroup_create(self, name, groupname, id: str = "$", mkstream: bool = False, entries_read: int | None = None): ...
    async def xgroup_delconsumer(self, name, groupname, consumername): ...
    async def xgroup_destroy(self, name, groupname): ...
    async def xgroup_createconsumer(self, name, groupname, consumername): ...
    async def xgroup_setid(self, name, groupname, id, entries_read: int | None = None): ...
    async def xinfo_consumers(self, name, groupname): ...
    async def xinfo_groups(self, name): ...
    async def xinfo_stream(self, name, full: bool = False): ...
    async def xlen(self, name: _Key) -> int: ...
    async def xpending(self, name, groupname): ...
    async def xpending_range(
        self, name: _Key, groupname, min, max, count: int, consumername: Incomplete | None = None, idle: int | None = None
    ): ...
    async def xrange(self, name, min: str = "-", max: str = "+", count: Incomplete | None = None): ...
    async def xread(self, streams, count: Incomplete | None = None, block: Incomplete | None = None): ...
    async def xreadgroup(
        self,
        groupname,
        consumername,
        streams,
        count: Incomplete | None = None,
        block: Incomplete | None = None,
        noack: bool = False,
    ): ...
    async def xrevrange(self, name, max: str = "+", min: str = "-", count: Incomplete | None = None): ...
    async def xtrim(
        self, name, maxlen: int | None = None, approximate: bool = True, minid: Incomplete | None = None, limit: int | None = None
    ): ...

# Sorted-set (Z*) commands. bzpopmax/bzpopmin follow the same overload
# convention as blpop/brpop: timeout 0 never yields None.
class SortedSetCommands(Generic[_StrType]):
    def zadd(
        self,
        name: _Key,
        mapping: Mapping[_Key, _Value],
        nx: bool = False,
        xx: bool = False,
        ch: bool = False,
        incr: bool = False,
        gt: Incomplete | None = False,
        lt: Incomplete | None = False,
    ) -> int: ...
    def zcard(self, name: _Key) -> int: ...
    def zcount(self, name: _Key, min: _Value, max: _Value) -> int: ...
    def zdiff(self, keys, withscores: bool = False): ...
    def zdiffstore(self, dest, keys): ...
    def zincrby(self, name: _Key, amount: float, value: _Value) -> float: ...
    def zinter(self, keys, aggregate: Incomplete | None = None, withscores: bool = False): ...
    def zinterstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None) -> int: ...
    def zlexcount(self, name: _Key, min: _Value, max: _Value) -> int: ...
    def zpopmax(self, name: _Key, count: int | None = None) -> list[tuple[_StrType, float]]: ...
    def zpopmin(self, name: _Key, count: int | None = None) -> list[tuple[_StrType, float]]: ...
    def zrandmember(self, key, count: Incomplete | None = None, withscores: bool = False): ...
    @overload
    def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> tuple[_StrType, _StrType, float]: ...
    @overload
    def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float) -> tuple[_StrType, _StrType, float] | None: ...
    @overload
    def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> tuple[_StrType, _StrType, float]: ...
    @overload
    def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float) -> tuple[_StrType, _StrType, float] | None: ...
    # zrange overloads: with withscores=True the result is (member, score)
    # pairs, scores typed by score_cast_func (default float); otherwise a
    # plain member list. The overload-overlap ignores are deliberate.
    @overload
    def zrange(  # type: ignore[overload-overlap]
        self,
        name: _Key,
        start: int,
        end: int,
        desc: bool,
        withscores: Literal[True],
        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
        byscore: bool = False,
        bylex: bool = False,
        offset: int | None = None,
        num: int | None = None,
    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
    @overload
    def zrange(  # type: ignore[overload-overlap]
        self,
        name: _Key,
        start: int,
        end: int,
        desc: bool,
        withscores: Literal[True],
        score_cast_func: Callable[[_StrType], float] = ...,
        byscore: bool = False,
        bylex: bool = False,
        offset: int | None = None,
        num: int | None = None,
    ) -> list[tuple[_StrType, float]]: ...
    @overload
    def zrange(  # type: ignore[overload-overlap]
        self,
        name: _Key,
        start: int,
        end: int,
        *,
        withscores: Literal[True],
        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
        byscore: bool = False,
        bylex: bool = False,
        offset: int | None = None,
        num: int | None = None,
    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
    @overload
    def zrange(  # type: ignore[overload-overlap]
        self,
        name: _Key,
        start: int,
        end: int,
        *,
        withscores: Literal[True],
        score_cast_func: Callable[[_StrType], float] = ...,
        byscore: bool = False,
        bylex: bool = False,
        offset: int | None = None,
        num: int | None = None,
    ) -> list[tuple[_StrType, float]]: ...
    @overload
    def zrange(
        self,
        name: _Key,
        start: int,
        end: int,
        desc: bool = False,
        withscores: bool = False,
        score_cast_func: Callable[[_StrType], Any] = ...,
        byscore: bool = False,
        bylex: bool = False,
        offset: int | None = None,
        num: int | None = None,
    ) -> list[_StrType]: ...
    @overload
    def zrevrange(
        self,
        name: _Key,
        start: int,
        end: int,
        withscores: Literal[True],
        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
    @overload
    def zrevrange(  # type: ignore[overload-overlap]
        self, name: _Key, start: int, end: int, withscores: Literal[True]
    ) -> list[tuple[_StrType, float]]: ...
    @overload
    def zrevrange(
        self, name: _Key, start: int, end: int, withscores: bool = False, score_cast_func: Callable[[Any], Any] = ...
    ) -> list[_StrType]: ...
    def zrangestore(
        self,
        dest,
        name,
        start,
        end,
        byscore: bool = False,
        bylex: bool = False,
        desc: bool = False,
        offset: Incomplete | None = None,
        num: Incomplete | None = None,
    ): ...
    def zrangebylex(
        self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None
    ) -> list[_StrType]: ...
    def zrevrangebylex(
        self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None
    ) -> list[_StrType]: ...
    @overload
    def zrangebyscore(  # type: ignore[overload-overlap]
        self,
        name: _Key,
        min: _Value,
        max: _Value,
        start: int | None = None,
        num: int | None = None,
        *,
        withscores: Literal[True],
        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
    @overload
    def zrangebyscore(  # type: ignore[overload-overlap]
        self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True]
    ) -> list[tuple[_StrType, float]]: ...
    @overload
    def zrangebyscore(
        self,
        name: _Key,
        min: _Value,
        max: _Value,
        start: int | None = None,
        num: int | None = None,
        withscores: bool = False,
        score_cast_func: Callable[[_StrType], Any] = ...,
    ) -> list[_StrType]: ...
    @overload
    def zrevrangebyscore(  # type: ignore[overload-overlap]
        self,
        name: _Key,
        max: _Value,
        min: _Value,
        start: int | None = None,
        num: int | None = None,
        *,
        withscores: Literal[True],
        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
    @overload
    def zrevrangebyscore(  # type: ignore[overload-overlap]
        self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True]
    ) -> list[tuple[_StrType, float]]: ...
    @overload
    def zrevrangebyscore(
        self,
        name: _Key,
        max: _Value,
        min: _Value,
        start: int | None = None,
        num: int | None = None,
        withscores: bool = False,
        score_cast_func: Callable[[_StrType], Any] = ...,
    ) -> list[_StrType]: ...
    def zrank(self, name: _Key, value: _Value, withscore: bool = False) -> int | None: ...
    def zrem(self, name: _Key, *values: _Value) -> int: ...
    def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> int: ...
    def zremrangebyrank(self, name: _Key, min: int, max: int) -> int: ...
    def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> int: ...
    def zrevrank(self, name: _Key, value: _Value, withscore: bool = False) -> int | None: ...
    def zscore(self, name: _Key, value: _Value) -> float | None: ...
    def zunion(self, keys, aggregate: Incomplete | None = None, withscores: bool = False): ...
    def zunionstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None) -> int: ...
    def zmscore(self, key, members): ...

# Async counterpart of the sorted-set stubs.
class AsyncSortedSetCommands(Generic[_StrType]):
    async def zadd(
        self,
        name: _Key,
        mapping: Mapping[_Key, _Value],
        nx: bool = False,
        xx: bool = False,
        ch: bool = False,
        incr: bool = False,
        gt: Incomplete | None = False,
        lt: Incomplete | None = False,
    ) -> int: ...
    async def zcard(self, name: _Key) -> int: ...
    async def zcount(self, name: _Key, min: _Value, max: _Value) -> int: ...
    async def zdiff(self, keys, withscores: bool = False): ...
    async def zdiffstore(self, dest, keys): ...
    async def zincrby(self, name: _Key, amount: float, value: _Value) -> float: ...
    async def zinter(self, keys, aggregate: Incomplete | None = None, withscores: bool = False): ...
    async def zinterstore(
        self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None
    ) -> int: ...
    async def zlexcount(self, name: _Key, min: _Value, max: _Value) -> int: ...
    async def zpopmax(self, name: _Key, count: int | None = None) -> list[tuple[_StrType, float]]: ...
    async def zpopmin(self, name: _Key, count: int | None = None) -> list[tuple[_StrType, float]]: ...
    async def zrandmember(self, key, count: Incomplete | None = None, withscores: bool = False): ...
    @overload
    async def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> tuple[_StrType, _StrType, float]: ...
    @overload
    async def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float) -> tuple[_StrType, _StrType, float] | None: ...
    @overload
    async def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> tuple[_StrType, _StrType, float]: ...
    @overload
    async def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float) -> tuple[_StrType, _StrType, float] | None: ...
    # zrange overloads mirror the sync class (scores typed by
    # score_cast_func when withscores=True).
    @overload
    async def zrange(  # type: ignore[overload-overlap]
        self,
        name: _Key,
        start: int,
        end: int,
        desc: bool,
        withscores: Literal[True],
        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
        byscore: bool = False,
        bylex: bool = False,
        offset: int | None = None,
        num: int | None = None,
    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
    @overload
    async def zrange(  # type: ignore[overload-overlap]
        self,
        name: _Key,
        start: int,
        end: int,
        desc: bool,
        withscores: Literal[True],
        score_cast_func: Callable[[_StrType], float] = ...,
        byscore: bool = False,
        bylex: bool = False,
        offset: int | None = None,
        num: int | None = None,
    ) -> list[tuple[_StrType, float]]: ...
    @overload
    async def zrange(  # type: ignore[overload-overlap]
        self,
        name: _Key,
        start: int,
        end: int,
        desc: bool = False,
        *,
        withscores: Literal[True],
        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
        byscore: bool = False,
        bylex: bool = False,
        offset: int | None = None,
        num: int | None = None,
    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
    @overload
    async def zrange(  # type: ignore[overload-overlap]
        self,
        name: _Key,
        start: int,
        end: int,
        desc: bool = False,
        *,
        withscores: Literal[True],
        score_cast_func: Callable[[_StrType], float] = ...,
        byscore: bool = False,
        bylex: bool = False,
        offset: int | None = None,
        num: int | None = None,
    ) -> list[tuple[_StrType, float]]: ...
    @overload
    async def zrange(
        self,
        name: _Key,
        start: int,
        end: int,
        desc: bool = False,
        withscores: bool = False,
        score_cast_func: Callable[[_StrType], Any] = ...,
        byscore: bool = False,
        bylex: bool = False,
        offset: int | None = None,
        num: int | None = None,
    ) -> list[_StrType]: ...
    @overload
    async def zrevrange(
        self,
        name: _Key,
        start: int,
        end: int,
        withscores: Literal[True],
        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
    @overload
    async def zrevrange(  # type: ignore[overload-overlap]
        self, name: _Key, start: int, end: int, withscores: Literal[True]
    ) -> list[tuple[_StrType, float]]: ...
    @overload
    async def zrevrange(
        self, name: _Key, start: int, end: int, withscores: bool = False, score_cast_func: Callable[[Any], Any] = ...
    ) -> list[_StrType]: ...
    async def zrangestore(
        self,
        dest,
        name,
        start,
        end,
        byscore: bool = False,
        bylex: bool = False,
        desc: bool = False,
        offset: Incomplete | None = None,
        num: Incomplete | None = None,
    ): ...
    async def zrangebylex(
        self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None
    ) -> list[_StrType]: ...
    async def zrevrangebylex(
        self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None
    ) -> list[_StrType]: ...
    @overload
    async def zrangebyscore(  # type: ignore[overload-overlap]
        self,
        name: _Key,
        min: _Value,
        max: _Value,
        start: int | None = None,
        num: int | None = None,
        *,
        withscores: Literal[True],
        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
    @overload
    async def zrangebyscore(  # type: ignore[overload-overlap]
        self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True]
    ) -> list[tuple[_StrType, float]]: ...
    @overload
    async def zrangebyscore(
        self,
        name: _Key,
        min: _Value,
        max: _Value,
        start: int | None = None,
        num: int | None = None,
        withscores: bool = False,
        score_cast_func: Callable[[_StrType], Any] = ...,
    ) -> list[_StrType]: ...
    @overload
    async def zrevrangebyscore(  # type: ignore[overload-overlap]
        self,
        name: _Key,
        max: _Value,
        min: _Value,
        start: int | None = None,
        num: int | None = None,
        *,
        withscores: Literal[True],
        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
    @overload
    async def zrevrangebyscore(  # type: ignore[overload-overlap]
        self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True]
    ) -> list[tuple[_StrType, float]]: ...
    @overload
    async def zrevrangebyscore(
        self,
        name: _Key,
        max: _Value,
        min: _Value,
        start: int | None = None,
        num: int | None = None,
        withscores: bool = False,
        score_cast_func: Callable[[_StrType], Any] = ...,
    ) -> list[_StrType]: ...
    async def zrank(self, name: _Key, value: _Value, withscore: bool = False) -> int | None: ...
    async def zrem(self, name: _Key, *values: _Value) -> int: ...
    async def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> int: ...
    async def zremrangebyrank(self, name: _Key, min: int, max: int) -> int: ...
    async def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> int: ...
    async def zrevrank(self, name: _Key, value: _Value, withscore: bool = False) -> int | None: ...
    async def zscore(self, name: _Key, value: _Value) -> float | None: ...
    async def zunion(self, keys, aggregate: Incomplete | None = None, withscores: bool = False): ...
    async def zunionstore(
        self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None
    ) -> int: ...
    async def zmscore(self, key, members): ...

# HyperLogLog (PF*) commands.
class HyperlogCommands:
    def pfadd(self, name: _Key, *values: _Value) -> int: ...
    def pfcount(self, name: _Key) -> int: ...
    def pfmerge(self, dest: _Key, *sources: _Key) -> bool: ...

# Async counterpart of the HyperLogLog stubs.
class AsyncHyperlogCommands:
    async def pfadd(self, name: _Key, *values: _Value) -> int: ...
    async def pfcount(self, name: _Key) -> int: ...
    async def pfmerge(self, dest: _Key, *sources: _Key) -> bool: ...

# Hash (H*) commands.
class HashCommands(Generic[_StrType]):
    def hdel(self, name: _Key, *keys: _Key) -> int: ...
    def hexists(self, name: _Key, key: _Key) -> bool: ...
    def hget(self, name: _Key, key: _Key) -> _StrType | None: ...
    def hgetall(self, name: _Key) -> dict[_StrType, _StrType]: ...
    def hincrby(self, name: _Key, key: _Key, amount: int = 1) -> int: ...
    def hincrbyfloat(self, name: _Key, key: _Key, amount: float = 1.0) -> float: ...
    def hkeys(self, name: _Key) -> list[_StrType]: ...
    def hlen(self, name: _Key) -> int: ...
    # hset overloads: either a key/value pair, a mapping, or both.
    @overload
    def hset(
        self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = None, items: Incomplete | None = None
    ) -> int: ...
    @overload
    def hset(
        self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value], items: Incomplete | None = None
    ) -> int: ...
    @overload
    def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value], items: Incomplete | None = None) -> int: ...
    def hsetnx(self, name: _Key, key: _Key, value: _Value) -> int: ...
    def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> bool: ...
    def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ...
    def hvals(self, name: _Key) -> list[_StrType]: ...
    def hstrlen(self, name, key): ...

# Async counterpart of the hash stubs.
class AsyncHashCommands(Generic[_StrType]):
    async def hdel(self, name: _Key, *keys: _Key) -> int: ...
    async def hexists(self, name: _Key, key: _Key) -> bool: ...
    async def hget(self, name: _Key, key: _Key) -> _StrType | None: ...
    async def hgetall(self, name: _Key) -> dict[_StrType, _StrType]: ...
    async def hincrby(self, name: _Key, key: _Key, amount: int = 1) -> int: ...
    async def hincrbyfloat(self, name: _Key, key: _Key, amount: float = 1.0) -> float: ...
    async def hkeys(self, name: _Key) -> list[_StrType]: ...
    async def hlen(self, name: _Key) -> int: ...
    @overload
    async def hset(
        self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = None, items: Incomplete | None = None
    ) -> int: ...
    @overload
    async def hset(
        self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value], items: Incomplete | None = None
    ) -> int: ...
    @overload
    async def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value], items: Incomplete | None = None) -> int: ...
    async def hsetnx(self, name: _Key, key: _Key, value: _Value) -> int: ...
    async def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> bool: ...
    async def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ...
    async def hvals(self, name: _Key) -> list[_StrType]: ...
    async def hstrlen(self, name, key): ...

# Awaitable registered Lua script handle (async analogue of Script).
class AsyncScript:
    def __init__(self, registered_client: AsyncValkey[Any], script: ScriptTextT) -> None: ...
    async def __call__(
        self, keys: Sequence[KeyT] | None = None, args: Iterable[EncodableT] | None = None, client: AsyncValkey[Any] | None = None
    ): ...

# Pub/Sub introspection and publish commands.
class PubSubCommands:
    def publish(self, channel: _Key, message: _Key, **kwargs: _CommandOptions) -> int: ...
    def pubsub_channels(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> list[str]: ...
    def pubsub_numpat(self, **kwargs: _CommandOptions) -> int: ...
    def pubsub_numsub(self, *args: _Key, **kwargs: _CommandOptions) -> list[tuple[str, int]]: ...

# Async counterpart of the Pub/Sub stubs.
class AsyncPubSubCommands:
    async def publish(self, channel: _Key, message: _Key, **kwargs: _CommandOptions) -> int: ...
    async def pubsub_channels(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> list[str]: ...
    async def pubsub_numpat(self, **kwargs: _CommandOptions) -> int: ...
    async def pubsub_numsub(self, *args: _Key, **kwargs: _CommandOptions) -> list[tuple[str, int]]: ...

# Lua scripting (EVAL/SCRIPT) commands; register_script returns a Script
# handle bound to this client.
class ScriptCommands(Generic[_StrType]):
    def eval(self, script, numkeys, *keys_and_args): ...
    def evalsha(self, sha, numkeys, *keys_and_args): ...
    def script_exists(self, *args): ...
    def script_debug(self, *args): ...
    def script_flush(self, sync_type: Incomplete | None = None): ...
    def script_kill(self): ...
    def script_load(self, script): ...
    def register_script(self, script: str | _StrType) -> Script: ...

# Async scripting stubs; register_script stays synchronous and returns an
# AsyncScript handle.
class AsyncScriptCommands(Generic[_StrType]):
    async def eval(self, script, numkeys, *keys_and_args): ...
    async def evalsha(self, sha, numkeys, *keys_and_args): ...
    async def script_exists(self, *args): ...
    async def script_debug(self, *args): ...
    async def script_flush(self, sync_type: Incomplete | None = None): ...
    async def script_kill(self): ...
    async def script_load(self, script): ...
    def register_script(self, script: ScriptTextT) -> AsyncScript: ...

# Geospatial (GEO*) commands; mostly untyped pending fuller annotations.
class GeoCommands:
    def geoadd(self, name, values, nx: bool = False, xx: bool = False, ch: bool = False): ...
    def geodist(self, name, place1, place2, unit: Incomplete | None = None): ...
    def geohash(self, name, *values): ...
    def geopos(self, name, *values): ...
    def georadius(
        self,
        name,
        longitude,
        latitude,
        radius,
        unit: Incomplete | None = None,
        withdist: bool = False,
        withcoord: bool = False,
        withhash: bool = False,
        count: Incomplete | None = None,
        sort: Incomplete | None = None,
        store: Incomplete | None = None,
        store_dist: Incomplete | None = None,
        any: bool = False,
    ): ...
    def georadiusbymember(
        self,
        name,
        member,
        radius,
        unit: Incomplete | None = None,
        withdist: bool = False,
        withcoord: bool = False,
        withhash: bool = False,
        count: Incomplete | None = None,
        sort: Incomplete | None = None,
        store: Incomplete | None = None,
        store_dist: Incomplete | None = None,
        any: bool = False,
    ): ...
    def geosearch(
        self,
        name,
        member: Incomplete | None = None,
        longitude: Incomplete | None = None,
        latitude: Incomplete | None = None,
        unit: str = "m",
        radius: Incomplete | None = None,
        width: Incomplete | None = None,
        height: Incomplete | None = None,
        sort: Incomplete | None = None,
        count: Incomplete | None = None,
        any: bool = False,
        withcoord: bool = False,
        withdist: bool = False,
        withhash: bool = False,
    ): ...
    def geosearchstore(
        self,
        dest,
        name,
        member: Incomplete | None = None,
        longitude: Incomplete | None = None,
        latitude: Incomplete | None = None,
        unit: str = "m",
        radius: Incomplete | None = None,
        width: Incomplete | None = None,
        height: Incomplete | None = None,
        sort: Incomplete | None = None,
        count: Incomplete | None = None,
        any: bool = False,
        storedist: bool = False,
    ): ...

# Async counterpart of the geospatial stubs.
class AsyncGeoCommands:
    async def geoadd(self, name, values, nx: bool = False, xx: bool = False, ch: bool = False): ...
    async def geodist(self, name, place1, place2, unit: Incomplete | None = None): ...
    async def geohash(self, name, *values): ...
    async def geopos(self, name, *values): ...
    async def georadius(
        self,
        name,
        longitude,
        latitude,
        radius,
        unit: Incomplete | None = None,
        withdist: bool = False,
        withcoord: bool = False,
        withhash: bool = False,
        count: Incomplete | None = None,
        sort: Incomplete | None = None,
        store: Incomplete | None = None,
        store_dist: Incomplete | None = None,
        any: bool = False,
    ): ...
    async def georadiusbymember(
        self,
        name,
        member,
        radius,
        unit: Incomplete | None = None,
        withdist: bool = False,
        withcoord: bool = False,
        withhash: bool = False,
        count: Incomplete | None = None,
        sort: Incomplete | None = None,
        store: Incomplete | None = None,
        store_dist: Incomplete | None = None,
        any: bool = False,
    ): ...
    async def geosearch(
        self,
        name,
        member: Incomplete | None = None,
        longitude: Incomplete | None = None,
        latitude: Incomplete | None = None,
        unit: str = "m",
        radius: Incomplete | None = None,
        width: Incomplete | None = None,
        height: Incomplete | None = None,
        sort: Incomplete | None = None,
        count: Incomplete | None = None,
        any: bool = False,
        withcoord: bool = False,
        withdist: bool = False,
        withhash: bool = False,
    ): ...
    async def geosearchstore(
        self,
        dest,
        name,
        member: Incomplete | None = None,
        longitude: Incomplete | None = None,
        latitude: Incomplete | None = None,
        unit: str = "m",
        radius: Incomplete | None = None,
        width: Incomplete | None = None,
        height: Incomplete | None = None,
        sort: Incomplete | None = None,
        count: Incomplete | None = None,
        any: bool = False,
        storedist: bool = False,
    ): ...

# MODULE/COMMAND introspection commands.
class ModuleCommands:
    def module_load(self, path, *args): ...
    def module_unload(self, name): ...
    def module_list(self): ...
    def command_info(self): ...
    def command_count(self): ...
    def command_getkeys(self, *args): ...
    def command(self): ...

# Registered Lua script handle. The mutable defaults in __call__ mirror the
# runtime signature and are intentional in a stub.
class Script:
    def __init__(self, registered_client, script) -> None: ...
    def __call__(self, keys=[], args=[], client: Incomplete | None = None): ...

# Builder for BITFIELD operations; execute() sends the accumulated command.
class BitFieldOperation:
    def __init__(self, client, key, default_overflow: Incomplete | None = None): ...
    def reset(self) -> None: ...
    def overflow(self, overflow): ...
    def incrby(self, fmt, offset, increment, overflow: Incomplete | None = None): ...
    def get(self, fmt, offset): ...
    def set(self, fmt, offset, value): ...
    @property
    def command(self): ...
    def execute(self): ...

# Async module commands; inherits the sync stubs and overrides command_info.
class AsyncModuleCommands(ModuleCommands):
    async def command_info(self) -> None: ...

# CLUSTER and read-mode commands.
class ClusterCommands:
    def cluster(self, cluster_arg: str, *args, **kwargs: _CommandOptions): ...
    def readwrite(self, **kwargs: _CommandOptions) -> bool: ...
    def readonly(self, **kwargs: _CommandOptions) -> bool: ...

# Async counterpart of the cluster stubs.
class AsyncClusterCommands:
    async def cluster(self, cluster_arg: str, *args, **kwargs: _CommandOptions): ...
    async def readwrite(self, **kwargs: _CommandOptions) -> bool: ...
    async def readonly(self, **kwargs: _CommandOptions) -> bool: ...

# FUNCTION commands; returns are Awaitable|str because this class is shared
# by sync and async clients. (Definition continues past this chunk.)
class FunctionCommands:
    def function_load(self, code: str, replace: bool | None = False) -> Awaitable[str] | str: ...
    def function_delete(self, library: str) -> Awaitable[str] | str: ...
+ def function_flush(self, mode: str = "SYNC") -> Awaitable[str] | str: ... + def function_list(self, library: str | None = "*", withcode: bool | None = False) -> Awaitable[list[Any]] | list[Any]: ... + def fcall(self, function, numkeys: int, *keys_and_args: list[Any] | None) -> Awaitable[str] | str: ... + def fcall_ro(self, function, numkeys: int, *keys_and_args: list[Any] | None) -> Awaitable[str] | str: ... + def function_dump(self) -> Awaitable[str] | str: ... + def function_restore(self, payload: str, policy: str | None = "APPEND") -> Awaitable[str] | str: ... + def function_kill(self) -> Awaitable[str] | str: ... + def function_stats(self) -> Awaitable[list[Any]] | list[Any]: ... + +class AsyncFunctionCommands: + async def function_load(self, code: str, replace: bool | None = False) -> Awaitable[str] | str: ... + async def function_delete(self, library: str) -> Awaitable[str] | str: ... + async def function_flush(self, mode: str = "SYNC") -> Awaitable[str] | str: ... + async def function_list( + self, library: str | None = "*", withcode: bool | None = False + ) -> Awaitable[list[Any]] | list[Any]: ... + async def fcall(self, function, numkeys: int, *keys_and_args: list[Any] | None) -> Awaitable[str] | str: ... + async def fcall_ro(self, function, numkeys: int, *keys_and_args: list[Any] | None) -> Awaitable[str] | str: ... + async def function_dump(self) -> Awaitable[str] | str: ... + async def function_restore(self, payload: str, policy: str | None = "APPEND") -> Awaitable[str] | str: ... + async def function_kill(self) -> Awaitable[str] | str: ... + async def function_stats(self) -> Awaitable[list[Any]] | list[Any]: ... + +class DataAccessCommands( + BasicKeyCommands[_StrType], + HyperlogCommands, + HashCommands[_StrType], + GeoCommands, + ListCommands[_StrType], + ScanCommands[_StrType], + SetCommands[_StrType], + StreamCommands, + SortedSetCommands[_StrType], +): ... 
+class AsyncDataAccessCommands(
+    AsyncBasicKeyCommands[_StrType],
+    AsyncHyperlogCommands,
+    AsyncHashCommands[_StrType],
+    AsyncGeoCommands,
+    AsyncListCommands[_StrType],
+    AsyncScanCommands[_StrType],
+    AsyncSetCommands[_StrType],
+    AsyncStreamCommands,
+    AsyncSortedSetCommands[_StrType],
+): ...
+class CoreCommands(
+    ACLCommands[_StrType],
+    ClusterCommands,
+    DataAccessCommands[_StrType],
+    ManagementCommands,
+    ModuleCommands,
+    PubSubCommands,
+    ScriptCommands[_StrType],
+): ...
+class AsyncCoreCommands(
+    AsyncACLCommands[_StrType],
+    AsyncClusterCommands,
+    AsyncDataAccessCommands[_StrType],
+    AsyncManagementCommands,
+    AsyncModuleCommands,
+    AsyncPubSubCommands,
+    AsyncScriptCommands[_StrType],
+    AsyncFunctionCommands,
+): ...
diff --git a/valkey/commands/graph/__init__.pyi b/valkey/commands/graph/__init__.pyi
new file mode 100644
index 00000000..a8209b8d
--- /dev/null
+++ b/valkey/commands/graph/__init__.pyi
@@ -0,0 +1,26 @@
+from typing import Any
+
+from .commands import GraphCommands as GraphCommands
+from .edge import Edge as Edge
+from .node import Node as Node
+from .path import Path as Path
+
+class Graph(GraphCommands):
+    NAME: Any
+    client: Any
+    execute_command: Any
+    nodes: Any
+    edges: Any
+    version: int
+    def __init__(self, client, name=...) -> None: ...
+    @property
+    def name(self): ...
+    def get_label(self, idx): ...
+    def get_relation(self, idx): ...
+    def get_property(self, idx): ...
+    def add_node(self, node) -> None: ...
+    def add_edge(self, edge) -> None: ...
+    def call_procedure(self, procedure, *args, read_only: bool = False, **kwargs): ...
+    def labels(self): ...
+    def relationship_types(self): ...
+    def property_keys(self): ...
diff --git a/valkey/commands/graph/commands.pyi b/valkey/commands/graph/commands.pyi new file mode 100644 index 00000000..b57418dd --- /dev/null +++ b/valkey/commands/graph/commands.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete +from typing import Any + +class GraphCommands: + def commit(self): ... + version: Any + def query( + self, + q, + params: Incomplete | None = None, + timeout: Incomplete | None = None, + read_only: bool = False, + profile: bool = False, + ): ... + def merge(self, pattern): ... + def delete(self): ... + nodes: Any + edges: Any + def flush(self) -> None: ... + def explain(self, query, params: Incomplete | None = None): ... + def bulk(self, **kwargs) -> None: ... + def profile(self, query): ... + def slowlog(self): ... + def config(self, name, value: Incomplete | None = None, set: bool = False): ... + def list_keys(self): ... diff --git a/valkey/commands/graph/edge.pyi b/valkey/commands/graph/edge.pyi new file mode 100644 index 00000000..3bd36b65 --- /dev/null +++ b/valkey/commands/graph/edge.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete +from typing import Any + +class Edge: + id: Any + relation: Any + properties: Any + src_node: Any + dest_node: Any + def __init__( + self, src_node, relation, dest_node, edge_id: Incomplete | None = None, properties: Incomplete | None = None + ) -> None: ... + def to_string(self): ... + def __eq__(self, rhs): ... diff --git a/valkey/commands/graph/exceptions.pyi b/valkey/commands/graph/exceptions.pyi new file mode 100644 index 00000000..6069e055 --- /dev/null +++ b/valkey/commands/graph/exceptions.pyi @@ -0,0 +1,5 @@ +from typing import Any + +class VersionMismatchException(Exception): + version: Any + def __init__(self, version) -> None: ... 
diff --git a/valkey/commands/graph/node.pyi b/valkey/commands/graph/node.pyi new file mode 100644 index 00000000..e7a65537 --- /dev/null +++ b/valkey/commands/graph/node.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete +from typing import Any + +class Node: + id: Any + alias: Any + label: Any + labels: Any + properties: Any + def __init__( + self, + node_id: Incomplete | None = None, + alias: Incomplete | None = None, + label: str | list[str] | None = None, + properties: Incomplete | None = None, + ) -> None: ... + def to_string(self): ... + def __eq__(self, rhs): ... diff --git a/valkey/commands/graph/path.pyi b/valkey/commands/graph/path.pyi new file mode 100644 index 00000000..69106f89 --- /dev/null +++ b/valkey/commands/graph/path.pyi @@ -0,0 +1,18 @@ +from typing import Any + +class Path: + append_type: Any + def __init__(self, nodes, edges) -> None: ... + @classmethod + def new_empty_path(cls): ... + def nodes(self): ... + def edges(self): ... + def get_node(self, index): ... + def get_relationship(self, index): ... + def first_node(self): ... + def last_node(self): ... + def edge_count(self): ... + def nodes_count(self): ... + def add_node(self, node): ... + def add_edge(self, edge): ... + def __eq__(self, other): ... 
diff --git a/valkey/commands/graph/query_result.pyi b/valkey/commands/graph/query_result.pyi new file mode 100644 index 00000000..d9f8b514 --- /dev/null +++ b/valkey/commands/graph/query_result.pyi @@ -0,0 +1,74 @@ +from typing import Any, ClassVar, Literal + +LABELS_ADDED: str +NODES_CREATED: str +NODES_DELETED: str +RELATIONSHIPS_DELETED: str +PROPERTIES_SET: str +RELATIONSHIPS_CREATED: str +INDICES_CREATED: str +INDICES_DELETED: str +CACHED_EXECUTION: str +INTERNAL_EXECUTION_TIME: str +STATS: Any + +class ResultSetColumnTypes: + COLUMN_UNKNOWN: ClassVar[Literal[0]] + COLUMN_SCALAR: ClassVar[Literal[1]] + COLUMN_NODE: ClassVar[Literal[2]] + COLUMN_RELATION: ClassVar[Literal[3]] + +class ResultSetScalarTypes: + VALUE_UNKNOWN: ClassVar[Literal[0]] + VALUE_NULL: ClassVar[Literal[1]] + VALUE_STRING: ClassVar[Literal[2]] + VALUE_INTEGER: ClassVar[Literal[3]] + VALUE_BOOLEAN: ClassVar[Literal[4]] + VALUE_DOUBLE: ClassVar[Literal[5]] + VALUE_ARRAY: ClassVar[Literal[6]] + VALUE_EDGE: ClassVar[Literal[7]] + VALUE_NODE: ClassVar[Literal[8]] + VALUE_PATH: ClassVar[Literal[9]] + VALUE_MAP: ClassVar[Literal[10]] + VALUE_POINT: ClassVar[Literal[11]] + +class QueryResult: + graph: Any + header: Any + result_set: Any + def __init__(self, graph, response, profile: bool = False) -> None: ... + def parse_results(self, raw_result_set) -> None: ... + statistics: Any + def parse_statistics(self, raw_statistics) -> None: ... + def parse_header(self, raw_result_set): ... + def parse_records(self, raw_result_set): ... + def parse_entity_properties(self, props): ... + def parse_string(self, cell): ... + def parse_node(self, cell): ... + def parse_edge(self, cell): ... + def parse_path(self, cell): ... + def parse_map(self, cell): ... + def parse_point(self, cell): ... + def parse_scalar(self, cell): ... + def parse_profile(self, response) -> None: ... + def is_empty(self): ... + @property + def labels_added(self): ... + @property + def nodes_created(self): ... 
+ @property + def nodes_deleted(self): ... + @property + def properties_set(self): ... + @property + def relationships_created(self): ... + @property + def relationships_deleted(self): ... + @property + def indices_created(self): ... + @property + def indices_deleted(self): ... + @property + def cached_execution(self): ... + @property + def run_time_ms(self): ... diff --git a/valkey/commands/helpers.pyi b/valkey/commands/helpers.pyi new file mode 100644 index 00000000..b4e5ac7f --- /dev/null +++ b/valkey/commands/helpers.pyi @@ -0,0 +1,10 @@ +def list_or_args(keys, args): ... +def nativestr(x): ... +def delist(x): ... +def parse_to_list(response): ... +def parse_list_to_dict(response): ... +def parse_to_dict(response): ... +def random_string(length: int = 10) -> str: ... +def quote_string(v): ... +def decode_dict_keys(obj): ... +def stringify_param_value(value): ... diff --git a/valkey/commands/json/__init__.pyi b/valkey/commands/json/__init__.pyi new file mode 100644 index 00000000..f9e8825b --- /dev/null +++ b/valkey/commands/json/__init__.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete +from typing import Any + +from ...client import Pipeline as ClientPipeline +from .commands import JSONCommands + +class JSON(JSONCommands): + MODULE_CALLBACKS: dict[str, Any] + client: Any + execute_command: Any + MODULE_VERSION: Incomplete | None + def __init__(self, client, version: Incomplete | None = None, decoder=..., encoder=...) -> None: ... + def pipeline(self, transaction: bool = True, shard_hint: Incomplete | None = None) -> Pipeline: ... + +class Pipeline(JSONCommands, ClientPipeline[Incomplete]): ... # type: ignore[misc] diff --git a/valkey/commands/json/commands.pyi b/valkey/commands/json/commands.pyi new file mode 100644 index 00000000..38d4d4c6 --- /dev/null +++ b/valkey/commands/json/commands.pyi @@ -0,0 +1,32 @@ +from _typeshed import Incomplete + +class JSONCommands: + def arrappend(self, name: str, path: str | None = ".", *args) -> list[int | None]: ... 
+ def arrindex( + self, name: str, path: str, scalar: int, start: int | None = None, stop: int | None = None + ) -> list[int | None]: ... + def arrinsert(self, name: str, path: str, index: int, *args) -> list[int | None]: ... + def arrlen(self, name: str, path: str | None = ".") -> list[int | None]: ... + def arrpop(self, name: str, path: str | None = ".", index: int | None = -1) -> list[str | None]: ... + def arrtrim(self, name: str, path: str, start: int, stop: int) -> list[int | None]: ... + def type(self, name: str, path: str | None = ".") -> list[str]: ... + def resp(self, name: str, path: str | None = ".") -> list[Incomplete]: ... + def objkeys(self, name, path="."): ... + def objlen(self, name, path="."): ... + def numincrby(self, name, path, number): ... + def nummultby(self, name, path, number): ... + def clear(self, name, path="."): ... + def delete(self, key, path="."): ... + forget = delete + def get(self, name, *args, no_escape: bool = False): ... + def mget(self, keys, path): ... + def set(self, name, path, obj, nx: bool = False, xx: bool = False, decode_keys: bool = False): ... + def set_file(self, name, path, file_name, nx: bool = False, xx: bool = False, decode_keys: bool = False): ... + def set_path(self, json_path, root_folder, nx: bool = False, xx: bool = False, decode_keys: bool = False): ... + def strlen(self, name, path: Incomplete | None = None): ... + def toggle(self, name, path="."): ... + def strappend(self, name, value, path="."): ... + def debug(self, subcommand, key: Incomplete | None = None, path="."): ... + def jsonget(self, *args, **kwargs): ... + def jsonmget(self, *args, **kwargs): ... + def jsonset(self, *args, **kwargs): ... diff --git a/valkey/commands/json/decoders.pyi b/valkey/commands/json/decoders.pyi new file mode 100644 index 00000000..ccea2438 --- /dev/null +++ b/valkey/commands/json/decoders.pyi @@ -0,0 +1,4 @@ +def bulk_of_jsons(d): ... +def decode_dict_keys(obj): ... +def unstring(obj): ... +def decode_list(b): ... 
diff --git a/valkey/commands/json/path.pyi b/valkey/commands/json/path.pyi new file mode 100644 index 00000000..bbc35c4f --- /dev/null +++ b/valkey/commands/json/path.pyi @@ -0,0 +1,5 @@ +class Path: + strPath: str + @staticmethod + def root_path() -> str: ... + def __init__(self, path: str) -> None: ... diff --git a/valkey/commands/parser.pyi b/valkey/commands/parser.pyi new file mode 100644 index 00000000..f17afa28 --- /dev/null +++ b/valkey/commands/parser.pyi @@ -0,0 +1,8 @@ +from valkey.client import AbstractValkey +from valkey.typing import EncodableT + +class CommandsParser: + commands: dict[str, str] + def __init__(self, valkey_connection: AbstractValkey) -> None: ... + def initialize(self, r: AbstractValkey) -> None: ... + def get_keys(self, valkey_conn: AbstractValkey, *args: EncodableT) -> list[EncodableT] | None: ... diff --git a/valkey/commands/redismodules.pyi b/valkey/commands/redismodules.pyi new file mode 100644 index 00000000..129b2a17 --- /dev/null +++ b/valkey/commands/redismodules.pyi @@ -0,0 +1,14 @@ +from .json import JSON +from .search import Search +from .timeseries import TimeSeries + +class ValkeyModuleCommands: + def json(self, encoder=..., decoder=...) -> JSON: ... + def ft(self, index_name: str = "idx") -> Search: ... + def ts(self) -> TimeSeries: ... + def bf(self): ... + def cf(self): ... + def cms(self): ... + def topk(self): ... + def tdigest(self): ... + def graph(self, index_name: str = "idx"): ... diff --git a/valkey/commands/search/__init__.pyi b/valkey/commands/search/__init__.pyi new file mode 100644 index 00000000..3366d451 --- /dev/null +++ b/valkey/commands/search/__init__.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete + +from .commands import SearchCommands + +class Search(SearchCommands): + class BatchIndexer: + def __init__(self, client, chunk_size: int = 1000) -> None: ... 
+ def add_document( + self, + doc_id, + nosave: bool = False, + score: float = 1.0, + payload: Incomplete | None = None, + replace: bool = False, + partial: bool = False, + no_create: bool = False, + **fields, + ): ... + def add_document_hash(self, doc_id, score: float = 1.0, replace: bool = False): ... + def commit(self): ... + + def __init__(self, client, index_name: str = "idx") -> None: ... diff --git a/valkey/commands/search/aggregation.pyi b/valkey/commands/search/aggregation.pyi new file mode 100644 index 00000000..48bac218 --- /dev/null +++ b/valkey/commands/search/aggregation.pyi @@ -0,0 +1,53 @@ +from typing import Any, ClassVar, Literal + +FIELDNAME: Any + +class Limit: + offset: Any + count: Any + def __init__(self, offset: int = 0, count: int = 0) -> None: ... + def build_args(self): ... + +class Reducer: + NAME: ClassVar[None] + def __init__(self, *args) -> None: ... + def alias(self, alias): ... + @property + def args(self): ... + +class SortDirection: + DIRSTRING: ClassVar[str | None] + field: Any + def __init__(self, field) -> None: ... + +class Asc(SortDirection): + DIRSTRING: ClassVar[Literal["ASC"]] + +class Desc(SortDirection): + DIRSTRING: ClassVar[Literal["DESC"]] + +class AggregateRequest: + def __init__(self, query: str = "*") -> None: ... + def load(self, *fields): ... + def group_by(self, fields, *reducers): ... + def apply(self, **kwexpr): ... + def limit(self, offset, num): ... + def sort_by(self, *fields, **kwargs): ... + def filter(self, expressions): ... + def with_schema(self): ... + def verbatim(self): ... + def cursor(self, count: int = 0, max_idle: float = 0.0): ... + def build_args(self): ... + +class Cursor: + cid: Any + max_idle: int + count: int + def __init__(self, cid) -> None: ... + def build_args(self): ... + +class AggregateResult: + rows: Any + cursor: Any + schema: Any + def __init__(self, rows, cursor, schema) -> None: ... 
diff --git a/valkey/commands/search/commands.pyi b/valkey/commands/search/commands.pyi new file mode 100644 index 00000000..f8a2baf3 --- /dev/null +++ b/valkey/commands/search/commands.pyi @@ -0,0 +1,111 @@ +from _typeshed import Incomplete +from collections.abc import Mapping +from typing import Any, Literal +from typing_extensions import TypeAlias + +from .aggregation import AggregateRequest, AggregateResult, Cursor +from .query import Query +from .result import Result + +_QueryParams: TypeAlias = Mapping[str, str | float] + +NUMERIC: Literal["NUMERIC"] + +CREATE_CMD: Literal["FT.CREATE"] +ALTER_CMD: Literal["FT.ALTER"] +SEARCH_CMD: Literal["FT.SEARCH"] +ADD_CMD: Literal["FT.ADD"] +ADDHASH_CMD: Literal["FT.ADDHASH"] +DROP_CMD: Literal["FT.DROP"] +EXPLAIN_CMD: Literal["FT.EXPLAIN"] +EXPLAINCLI_CMD: Literal["FT.EXPLAINCLI"] +DEL_CMD: Literal["FT.DEL"] +AGGREGATE_CMD: Literal["FT.AGGREGATE"] +PROFILE_CMD: Literal["FT.PROFILE"] +CURSOR_CMD: Literal["FT.CURSOR"] +SPELLCHECK_CMD: Literal["FT.SPELLCHECK"] +DICT_ADD_CMD: Literal["FT.DICTADD"] +DICT_DEL_CMD: Literal["FT.DICTDEL"] +DICT_DUMP_CMD: Literal["FT.DICTDUMP"] +GET_CMD: Literal["FT.GET"] +MGET_CMD: Literal["FT.MGET"] +CONFIG_CMD: Literal["FT.CONFIG"] +TAGVALS_CMD: Literal["FT.TAGVALS"] +ALIAS_ADD_CMD: Literal["FT.ALIASADD"] +ALIAS_UPDATE_CMD: Literal["FT.ALIASUPDATE"] +ALIAS_DEL_CMD: Literal["FT.ALIASDEL"] +INFO_CMD: Literal["FT.INFO"] +SUGADD_COMMAND: Literal["FT.SUGADD"] +SUGDEL_COMMAND: Literal["FT.SUGDEL"] +SUGLEN_COMMAND: Literal["FT.SUGLEN"] +SUGGET_COMMAND: Literal["FT.SUGGET"] +SYNUPDATE_CMD: Literal["FT.SYNUPDATE"] +SYNDUMP_CMD: Literal["FT.SYNDUMP"] + +NOOFFSETS: Literal["NOOFFSETS"] +NOFIELDS: Literal["NOFIELDS"] +STOPWORDS: Literal["STOPWORDS"] +WITHSCORES: Literal["WITHSCORES"] +FUZZY: Literal["FUZZY"] +WITHPAYLOADS: Literal["WITHPAYLOADS"] + +class SearchCommands: + def batch_indexer(self, chunk_size: int = 100): ... 
+ def create_index( + self, + fields, + no_term_offsets: bool = False, + no_field_flags: bool = False, + stopwords: Incomplete | None = None, + definition: Incomplete | None = None, + max_text_fields: bool = False, # added in 4.1.1 + temporary: Incomplete | None = None, # added in 4.1.1 + no_highlight: bool = False, # added in 4.1.1 + no_term_frequencies: bool = False, # added in 4.1.1 + skip_initial_scan: bool = False, # added in 4.1.1 + ): ... + def alter_schema_add(self, fields): ... + def dropindex(self, delete_documents: bool = False): ... + def add_document( + self, + doc_id, + nosave: bool = False, + score: float = 1.0, + payload: Incomplete | None = None, + replace: bool = False, + partial: bool = False, + language: Incomplete | None = None, + no_create: bool = False, + **fields, + ): ... + def add_document_hash(self, doc_id, score: float = 1.0, language: Incomplete | None = None, replace: bool = False): ... + def delete_document(self, doc_id, conn: Incomplete | None = None, delete_actual_document: bool = False): ... + def load_document(self, id): ... + def get(self, *ids): ... + def info(self): ... + def get_params_args(self, query_params: _QueryParams) -> list[Any]: ... + def search(self, query: str | Query, query_params: _QueryParams | None = None) -> Result: ... + def explain(self, query: str | Query, query_params: _QueryParams | None = None): ... + def explain_cli(self, query): ... + def aggregate(self, query: AggregateRequest | Cursor, query_params: _QueryParams | None = None) -> AggregateResult: ... + def profile( + self, query: str | Query | AggregateRequest, limited: bool = False, query_params: Mapping[str, str | float] | None = None + ) -> tuple[Incomplete, Incomplete]: ... + def spellcheck( + self, query, distance: Incomplete | None = None, include: Incomplete | None = None, exclude: Incomplete | None = None + ): ... + def dict_add(self, name, *terms): ... + def dict_del(self, name, *terms): ... + def dict_dump(self, name): ... 
+ def config_set(self, option: str, value: str) -> bool: ... + def config_get(self, option: str) -> dict[str, str]: ... + def tagvals(self, tagfield): ... + def aliasadd(self, alias): ... + def aliasupdate(self, alias): ... + def aliasdel(self, alias): ... + def sugadd(self, key, *suggestions, **kwargs): ... + def suglen(self, key): ... + def sugdel(self, key, string): ... + def sugget(self, key, prefix, fuzzy: bool = False, num: int = 10, with_scores: bool = False, with_payloads: bool = False): ... + def synupdate(self, groupid, skipinitial: bool = False, *terms): ... + def syndump(self): ... diff --git a/valkey/commands/search/query.pyi b/valkey/commands/search/query.pyi new file mode 100644 index 00000000..eb1846ba --- /dev/null +++ b/valkey/commands/search/query.pyi @@ -0,0 +1,52 @@ +from _typeshed import Incomplete +from typing import Any + +class Query: + def __init__(self, query_string) -> None: ... + def query_string(self): ... + def limit_ids(self, *ids): ... + def return_fields(self, *fields): ... + def return_field(self, field, as_field: Incomplete | None = None): ... + def summarize( + self, + fields: Incomplete | None = None, + context_len: Incomplete | None = None, + num_frags: Incomplete | None = None, + sep: Incomplete | None = None, + ): ... + def highlight(self, fields: Incomplete | None = None, tags: Incomplete | None = None): ... + def language(self, language): ... + def slop(self, slop): ... + def in_order(self): ... + def scorer(self, scorer): ... + def get_args(self): ... + def paging(self, offset, num): ... + def verbatim(self): ... + def no_content(self): ... + def no_stopwords(self): ... + def with_payloads(self): ... + def with_scores(self): ... + def limit_fields(self, *fields): ... + def add_filter(self, flt): ... + def sort_by(self, field, asc: bool = True): ... + def expander(self, expander): ... + +class Filter: + args: Any + def __init__(self, keyword, field, *args) -> None: ... 
+ +class NumericFilter(Filter): + INF: str + NEG_INF: str + def __init__(self, field, minval, maxval, minExclusive: bool = False, maxExclusive: bool = False) -> None: ... + +class GeoFilter(Filter): + METERS: str + KILOMETERS: str + FEET: str + MILES: str + def __init__(self, field, lon, lat, radius, unit="km") -> None: ... + +class SortbyField: + args: Any + def __init__(self, field, asc: bool = True) -> None: ... diff --git a/valkey/commands/search/result.pyi b/valkey/commands/search/result.pyi new file mode 100644 index 00000000..046c3170 --- /dev/null +++ b/valkey/commands/search/result.pyi @@ -0,0 +1,7 @@ +from typing import Any + +class Result: + total: Any + duration: Any + docs: Any + def __init__(self, res, hascontent, duration: int = 0, has_payload: bool = False, with_scores: bool = False) -> None: ... diff --git a/valkey/commands/sentinel.pyi b/valkey/commands/sentinel.pyi new file mode 100644 index 00000000..b526a45f --- /dev/null +++ b/valkey/commands/sentinel.pyi @@ -0,0 +1,17 @@ +class SentinelCommands: + def sentinel(self, *args): ... + def sentinel_get_master_addr_by_name(self, service_name): ... + def sentinel_master(self, service_name): ... + def sentinel_masters(self): ... + def sentinel_monitor(self, name, ip, port, quorum): ... + def sentinel_remove(self, name): ... + def sentinel_sentinels(self, service_name): ... + def sentinel_set(self, name, option, value): ... + def sentinel_slaves(self, service_name): ... + def sentinel_reset(self, pattern): ... + def sentinel_failover(self, new_master_name): ... + def sentinel_ckquorum(self, new_master_name): ... + def sentinel_flushconfig(self): ... + +class AsyncSentinelCommands(SentinelCommands): + async def sentinel(self, *args) -> None: ... 
diff --git a/valkey/commands/timeseries/__init__.pyi b/valkey/commands/timeseries/__init__.pyi new file mode 100644 index 00000000..95457d6f --- /dev/null +++ b/valkey/commands/timeseries/__init__.pyi @@ -0,0 +1,14 @@ +from _typeshed import Incomplete +from typing import Any + +from ...client import Pipeline as ClientPipeline +from .commands import TimeSeriesCommands + +class TimeSeries(TimeSeriesCommands): + MODULE_CALLBACKS: dict[str, Any] + client: Any + execute_command: Any + def __init__(self, client: Incomplete | None = None, **kwargs) -> None: ... + def pipeline(self, transaction: bool = True, shard_hint: Incomplete | None = None) -> Pipeline: ... + +class Pipeline(TimeSeriesCommands, ClientPipeline[Incomplete]): ... # type: ignore[misc] diff --git a/valkey/commands/timeseries/commands.pyi b/valkey/commands/timeseries/commands.pyi new file mode 100644 index 00000000..ed70e575 --- /dev/null +++ b/valkey/commands/timeseries/commands.pyi @@ -0,0 +1,160 @@ +from typing import Literal +from typing_extensions import TypeAlias + +_Key: TypeAlias = bytes | str | memoryview + +ADD_CMD: Literal["TS.ADD"] +ALTER_CMD: Literal["TS.ALTER"] +CREATERULE_CMD: Literal["TS.CREATERULE"] +CREATE_CMD: Literal["TS.CREATE"] +DECRBY_CMD: Literal["TS.DECRBY"] +DELETERULE_CMD: Literal["TS.DELETERULE"] +DEL_CMD: Literal["TS.DEL"] +GET_CMD: Literal["TS.GET"] +INCRBY_CMD: Literal["TS.INCRBY"] +INFO_CMD: Literal["TS.INFO"] +MADD_CMD: Literal["TS.MADD"] +MGET_CMD: Literal["TS.MGET"] +MRANGE_CMD: Literal["TS.MRANGE"] +MREVRANGE_CMD: Literal["TS.MREVRANGE"] +QUERYINDEX_CMD: Literal["TS.QUERYINDEX"] +RANGE_CMD: Literal["TS.RANGE"] +REVRANGE_CMD: Literal["TS.REVRANGE"] + +class TimeSeriesCommands: + def create( + self, + key: _Key, + retention_msecs: int | None = None, + uncompressed: bool | None = False, + labels: dict[str, str] | None = None, + chunk_size: int | None = None, + duplicate_policy: str | None = None, + ): ... 
+ def alter( + self, + key: _Key, + retention_msecs: int | None = None, + labels: dict[str, str] | None = None, + chunk_size: int | None = None, + duplicate_policy: str | None = None, + ): ... + def add( + self, + key: _Key, + timestamp: int | str, + value: float, + retention_msecs: int | None = None, + uncompressed: bool | None = False, + labels: dict[str, str] | None = None, + chunk_size: int | None = None, + duplicate_policy: str | None = None, + ): ... + def madd(self, ktv_tuples): ... + def incrby( + self, + key: _Key, + value: float, + timestamp: int | str | None = None, + retention_msecs: int | None = None, + uncompressed: bool | None = False, + labels: dict[str, str] | None = None, + chunk_size: int | None = None, + ): ... + def decrby( + self, + key: _Key, + value: float, + timestamp: int | str | None = None, + retention_msecs: int | None = None, + uncompressed: bool | None = False, + labels: dict[str, str] | None = None, + chunk_size: int | None = None, + ): ... + def delete(self, key, from_time, to_time): ... + def createrule( + self, source_key: _Key, dest_key: _Key, aggregation_type: str, bucket_size_msec: int, align_timestamp: int | None = None + ): ... + def deleterule(self, source_key, dest_key): ... + def range( + self, + key: _Key, + from_time: int | str, + to_time: int | str, + count: int | None = None, + aggregation_type: str | None = None, + bucket_size_msec: int | None = 0, + filter_by_ts: list[int] | None = None, + filter_by_min_value: int | None = None, + filter_by_max_value: int | None = None, + align: int | str | None = None, + latest: bool | None = False, + bucket_timestamp: str | None = None, + empty: bool | None = False, + ): ... 
+ def revrange( + self, + key: _Key, + from_time: int | str, + to_time: int | str, + count: int | None = None, + aggregation_type: str | None = None, + bucket_size_msec: int | None = 0, + filter_by_ts: list[int] | None = None, + filter_by_min_value: int | None = None, + filter_by_max_value: int | None = None, + align: int | str | None = None, + latest: bool | None = False, + bucket_timestamp: str | None = None, + empty: bool | None = False, + ): ... + def mrange( + self, + from_time: int | str, + to_time: int | str, + filters: list[str], + count: int | None = None, + aggregation_type: str | None = None, + bucket_size_msec: int | None = 0, + with_labels: bool | None = False, + filter_by_ts: list[int] | None = None, + filter_by_min_value: int | None = None, + filter_by_max_value: int | None = None, + groupby: str | None = None, + reduce: str | None = None, + select_labels: list[str] | None = None, + align: int | str | None = None, + latest: bool | None = False, + bucket_timestamp: str | None = None, + empty: bool | None = False, + ): ... + def mrevrange( + self, + from_time: int | str, + to_time: int | str, + filters: list[str], + count: int | None = None, + aggregation_type: str | None = None, + bucket_size_msec: int | None = 0, + with_labels: bool | None = False, + filter_by_ts: list[int] | None = None, + filter_by_min_value: int | None = None, + filter_by_max_value: int | None = None, + groupby: str | None = None, + reduce: str | None = None, + select_labels: list[str] | None = None, + align: int | str | None = None, + latest: bool | None = False, + bucket_timestamp: str | None = None, + empty: bool | None = False, + ): ... + def get(self, key: _Key, latest: bool | None = False): ... + def mget( + self, + filters: list[str], + with_labels: bool | None = False, + select_labels: list[str] | None = None, + latest: bool | None = False, + ): ... + def info(self, key): ... + def queryindex(self, filters): ... 
diff --git a/valkey/commands/timeseries/info.pyi b/valkey/commands/timeseries/info.pyi new file mode 100644 index 00000000..8b082c7d --- /dev/null +++ b/valkey/commands/timeseries/info.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete +from typing import Any + +class TSInfo: + rules: list[Any] + labels: list[Any] + sourceKey: Incomplete | None + chunk_count: Incomplete | None + memory_usage: Incomplete | None + total_samples: Incomplete | None + retention_msecs: Incomplete | None + last_time_stamp: Incomplete | None + first_time_stamp: Incomplete | None + + max_samples_per_chunk: Incomplete | None + chunk_size: Incomplete | None + duplicate_policy: Incomplete | None + def __init__(self, args) -> None: ... diff --git a/valkey/commands/timeseries/utils.pyi b/valkey/commands/timeseries/utils.pyi new file mode 100644 index 00000000..4a0d52c4 --- /dev/null +++ b/valkey/commands/timeseries/utils.pyi @@ -0,0 +1,5 @@ +def list_to_dict(aList): ... +def parse_range(response): ... +def parse_m_range(response): ... +def parse_get(response): ... +def parse_m_get(response): ... diff --git a/valkey/connection.pyi b/valkey/connection.pyi new file mode 100644 index 00000000..9796fd21 --- /dev/null +++ b/valkey/connection.pyi @@ -0,0 +1,289 @@ +from _typeshed import Incomplete, Unused +from abc import abstractmethod +from collections.abc import Callable, Iterable, Mapping +from queue import Queue +from socket import socket +from typing import Any, ClassVar +from typing_extensions import Self, TypeAlias + +from .credentials import CredentialProvider +from .retry import Retry + +ssl_available: bool +SYM_STAR: bytes +SYM_DOLLAR: bytes +SYM_CRLF: bytes +SYM_EMPTY: bytes +SERVER_CLOSED_CONNECTION_ERROR: str +NONBLOCKING_EXCEPTIONS: tuple[type[Exception], ...] 
+NONBLOCKING_EXCEPTION_ERROR_NUMBERS: dict[type[Exception], int] +SENTINEL: object +MODULE_LOAD_ERROR: str +NO_SUCH_MODULE_ERROR: str +MODULE_UNLOAD_NOT_POSSIBLE_ERROR: str +MODULE_EXPORTS_DATA_TYPES_ERROR: str +FALSE_STRINGS: tuple[str, ...] +URL_QUERY_ARGUMENT_PARSERS: dict[str, Callable[[Any], Any]] + +# Options as passed to Pool.get_connection(). +_ConnectionPoolOptions: TypeAlias = Any +_ConnectFunc: TypeAlias = Callable[[Connection], object] + +class BaseParser: + EXCEPTION_CLASSES: ClassVar[dict[str, type[Exception] | dict[str, type[Exception]]]] + @classmethod + def parse_error(cls, response: str) -> Exception: ... + +class SocketBuffer: + socket_read_size: int + bytes_written: int + bytes_read: int + socket_timeout: float | None + def __init__(self, socket: socket, socket_read_size: int, socket_timeout: float | None) -> None: ... + def unread_bytes(self) -> int: ... + def can_read(self, timeout: float | None) -> bool: ... + def read(self, length: int) -> bytes: ... + def readline(self) -> bytes: ... + def get_pos(self) -> int: ... + def rewind(self, pos: int) -> None: ... + def purge(self) -> None: ... + def close(self) -> None: ... + +class PythonParser(BaseParser): + encoding: str + socket_read_size: int + encoder: Encoder | None + def __init__(self, socket_read_size: int) -> None: ... + def __del__(self) -> None: ... + def on_connect(self, connection: Connection) -> None: ... + def on_disconnect(self) -> None: ... + def can_read(self, timeout: float | None) -> bool: ... + def read_response(self, disable_decoding: bool = False) -> Any: ... # `str | bytes` or `list[str | bytes]` + +class LibvalkeyParser(BaseParser): + socket_read_size: int + def __init__(self, socket_read_size: int) -> None: ... + def __del__(self) -> None: ... + def on_connect(self, connection: Connection, **kwargs) -> None: ... + def on_disconnect(self) -> None: ... + def can_read(self, timeout: float | None) -> bool: ... 
+ def read_from_socket(self, timeout: float | None = ..., raise_on_timeout: bool = True) -> bool: ... + def read_response(self, disable_decoding: bool = False) -> Any: ... # `str | bytes` or `list[str | bytes]` + +DefaultParser: type[BaseParser] # Libvalkey or PythonParser + +_Encodable: TypeAlias = str | bytes | memoryview | bool | float + +class Encoder: + encoding: str + encoding_errors: str + decode_responses: bool + def __init__(self, encoding: str, encoding_errors: str, decode_responses: bool) -> None: ... + def encode(self, value: _Encodable) -> bytes: ... + def decode(self, value: str | bytes | memoryview, force: bool = False) -> str: ... + +class AbstractConnection: + pid: int + db: int + client_name: str | None + credential_provider: CredentialProvider | None + password: str | None + username: str | None + socket_timeout: float | None + socket_connect_timeout: float | None + retry_on_timeout: bool + retry_on_error: list[type[Exception]] + retry: Retry + health_check_interval: int + next_health_check: int + valkey_connect_func: _ConnectFunc | None + encoder: Encoder + + def __init__( + self, + db: int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + retry_on_timeout: bool = False, + retry_on_error: list[type[Exception]] = ..., + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: type[BaseParser] = ..., + socket_read_size: int = 65536, + health_check_interval: int = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: _ConnectFunc | None = None, + credential_provider: CredentialProvider | None = None, + command_packer: Incomplete | None = None, + ) -> None: ... + @abstractmethod + def repr_pieces(self) -> list[tuple[str, Any]]: ... + def register_connect_callback(self, callback: _ConnectFunc) -> None: ... + def clear_connect_callbacks(self) -> None: ... 
+ def set_parser(self, parser_class: type[BaseParser]) -> None: ... + def connect(self) -> None: ... + def on_connect(self) -> None: ... + def disconnect(self, *args: Unused) -> None: ... # 'args' added in valkey 4.1.2 + def check_health(self) -> None: ... + def send_packed_command(self, command: str | Iterable[str], check_health: bool = True) -> None: ... + def send_command(self, *args, **kwargs) -> None: ... + def can_read(self, timeout: float | None = 0) -> bool: ... + def read_response( + self, disable_decoding: bool = False, *, disconnect_on_error: bool = True + ) -> Any: ... # `str | bytes` or `list[str | bytes]` + def pack_command(self, *args) -> list[bytes]: ... + def pack_commands(self, commands: Iterable[Iterable[Incomplete]]) -> list[bytes]: ... + +class Connection(AbstractConnection): + host: str + port: int + socket_keepalive: bool + socket_keepalive_options: Mapping[str, int | str] + socket_type: int + def __init__( + self, + host: str = "localhost", + port: int = 6379, + socket_keepalive: bool = False, + socket_keepalive_options: Mapping[str, int | str] | None = None, + socket_type: int = 0, + *, + db: int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + retry_on_timeout: bool = False, + retry_on_error: list[type[Exception]] = ..., + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: type[BaseParser] = ..., + socket_read_size: int = 65536, + health_check_interval: int = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: _ConnectFunc | None = None, + credential_provider: CredentialProvider | None = None, + command_packer: Incomplete | None = None, + ) -> None: ... + def repr_pieces(self) -> list[tuple[str, Any]]: ... 
+ +class SSLConnection(Connection): + keyfile: Any + certfile: Any + cert_reqs: Any + ca_certs: Any + ca_path: Incomplete | None + check_hostname: bool + certificate_password: Incomplete | None + ssl_validate_ocsp: bool + ssl_validate_ocsp_stapled: bool # added in 4.1.1 + ssl_ocsp_context: Incomplete | None # added in 4.1.1 + ssl_ocsp_expected_cert: Incomplete | None # added in 4.1.1 + def __init__( + self, + ssl_keyfile=None, + ssl_certfile=None, + ssl_cert_reqs="required", + ssl_ca_certs=None, + ssl_ca_data: Incomplete | None = None, + ssl_check_hostname: bool = False, + ssl_ca_path: Incomplete | None = None, + ssl_password: Incomplete | None = None, + ssl_validate_ocsp: bool = False, + ssl_validate_ocsp_stapled: bool = False, # added in 4.1.1 + ssl_ocsp_context: Incomplete | None = None, # added in 4.1.1 + ssl_ocsp_expected_cert: Incomplete | None = None, # added in 4.1.1 + *, + host: str = "localhost", + port: int = 6379, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + socket_keepalive: bool = False, + socket_keepalive_options: Mapping[str, int | str] | None = None, + socket_type: int = 0, + db: int = 0, + password: str | None = None, + retry_on_timeout: bool = False, + retry_on_error: list[type[Exception]] = ..., + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: type[BaseParser] = ..., + socket_read_size: int = 65536, + health_check_interval: int = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: _ConnectFunc | None = None, + credential_provider: CredentialProvider | None = None, + command_packer: Incomplete | None = None, + ) -> None: ... 
+ +class UnixDomainSocketConnection(AbstractConnection): + path: str + def __init__( + self, + path: str = "", + *, + db: int = 0, + password: str | None = None, + socket_timeout: float | None = None, + socket_connect_timeout: float | None = None, + retry_on_timeout: bool = False, + retry_on_error: list[type[Exception]] = ..., + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: type[BaseParser] = ..., + socket_read_size: int = 65536, + health_check_interval: int = 0, + client_name: str | None = None, + username: str | None = None, + retry: Retry | None = None, + valkey_connect_func: _ConnectFunc | None = None, + credential_provider: CredentialProvider | None = None, + command_packer: Incomplete | None = None, + ) -> None: ... + def repr_pieces(self) -> list[tuple[str, Any]]: ... + +# TODO: make generic on `connection_class` +class ConnectionPool: + connection_class: type[Connection] + connection_kwargs: dict[str, Any] + max_connections: int + pid: int + @classmethod + def from_url(cls, url: str, *, db: int = ..., decode_components: bool = ..., **kwargs) -> Self: ... + def __init__( + self, connection_class: type[AbstractConnection] = ..., max_connections: int | None = None, **connection_kwargs + ) -> None: ... + def reset(self) -> None: ... + def get_connection(self, command_name: Unused, *keys, **options: _ConnectionPoolOptions) -> Connection: ... + def make_connection(self) -> Connection: ... + def release(self, connection: Connection) -> None: ... + def disconnect(self, inuse_connections: bool = True) -> None: ... + def get_encoder(self) -> Encoder: ... + def owns_connection(self, connection: Connection) -> bool: ... 
+ +class BlockingConnectionPool(ConnectionPool): + queue_class: type[Queue[Any]] + timeout: float + pool: Queue[Connection | None] # might not be defined + def __init__( + self, + max_connections: int = 50, + timeout: float = 20, + connection_class: type[Connection] = ..., + queue_class: type[Queue[Any]] = ..., + **connection_kwargs, + ) -> None: ... + def disconnect(self) -> None: ... # type: ignore[override] + +def to_bool(value: object) -> bool: ... +def parse_url(url: str) -> dict[str, Any]: ... diff --git a/valkey/crc.pyi b/valkey/crc.pyi new file mode 100644 index 00000000..d808e657 --- /dev/null +++ b/valkey/crc.pyi @@ -0,0 +1,5 @@ +from valkey.typing import EncodedT + +VALKEY_CLUSTER_HASH_SLOTS: int + +def key_slot(key: EncodedT, bucket: int = 16384) -> int: ... diff --git a/valkey/credentials.pyi b/valkey/credentials.pyi new file mode 100644 index 00000000..7a2d78ec --- /dev/null +++ b/valkey/credentials.pyi @@ -0,0 +1,11 @@ +from abc import abstractmethod + +class CredentialProvider: + @abstractmethod + def get_credentials(self) -> tuple[str] | tuple[str, str]: ... + +class UsernamePasswordCredentialProvider(CredentialProvider): + username: str + password: str + def __init__(self, username: str | None = None, password: str | None = None) -> None: ... + def get_credentials(self) -> tuple[str] | tuple[str, str]: ... diff --git a/valkey/exceptions.pyi b/valkey/exceptions.pyi new file mode 100644 index 00000000..9f671ad7 --- /dev/null +++ b/valkey/exceptions.pyi @@ -0,0 +1,42 @@ +class ValkeyError(Exception): ... +class AuthenticationError(ValkeyError): ... +class ConnectionError(ValkeyError): ... +class TimeoutError(ValkeyError): ... +class AuthorizationError(ConnectionError): ... +class BusyLoadingError(ConnectionError): ... +class InvalidResponse(ValkeyError): ... +class ResponseError(ValkeyError): ... +class DataError(ValkeyError): ... +class PubSubError(ValkeyError): ... +class WatchError(ValkeyError): ... +class NoScriptError(ResponseError): ... 
+class ExecAbortError(ResponseError): ... +class ReadOnlyError(ResponseError): ... +class NoPermissionError(ResponseError): ... +class ModuleError(ResponseError): ... +class LockError(ValkeyError, ValueError): ... +class LockNotOwnedError(LockError): ... +class ChildDeadlockedError(Exception): ... +class AuthenticationWrongNumberOfArgsError(ResponseError): ... +class ValkeyClusterException(Exception): ... +class ClusterError(ValkeyError): ... + +class ClusterDownError(ClusterError, ResponseError): + args: tuple[str] + message: str + def __init__(self, resp: str) -> None: ... + +class AskError(ResponseError): + args: tuple[str] + message: str + slot_id: int + node_addr: tuple[str, int] + host: str + port: int + def __init__(self, resp: str) -> None: ... + +class TryAgainError(ResponseError): ... +class ClusterCrossSlotError(ResponseError): ... +class MovedError(AskError): ... +class MasterDownError(ClusterDownError): ... +class SlotNotCoveredError(ValkeyClusterException): ... diff --git a/valkey/lock.pyi b/valkey/lock.pyi new file mode 100644 index 00000000..81d1dcac --- /dev/null +++ b/valkey/lock.pyi @@ -0,0 +1,56 @@ +from _typeshed import Incomplete +from types import TracebackType +from typing import Any, ClassVar, Protocol +from typing_extensions import Self + +from valkey.client import Valkey + +class _Local(Protocol): + token: str | bytes | None + +class Lock: + LUA_EXTEND_SCRIPT: ClassVar[str] + LUA_REACQUIRE_SCRIPT: ClassVar[str] + LUA_RELEASE_SCRIPT: ClassVar[str] + lua_extend: ClassVar[Incomplete | None] + lua_reacquire: ClassVar[Incomplete | None] + lua_release: ClassVar[Incomplete | None] + valkey: Valkey[Any] + name: str + timeout: float | None + sleep: float + blocking: bool + blocking_timeout: float | None + thread_local: bool + local: _Local + def __init__( + self, + valkey: Valkey[Any], + name: str, + timeout: float | None = None, + sleep: float = 0.1, + blocking: bool = True, + blocking_timeout: float | None = None, + thread_local: bool = True, + 
) -> None: ... + def register_scripts(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> bool | None: ... + def acquire( + self, + sleep: float | None = None, + blocking: bool | None = None, + blocking_timeout: float | None = None, + token: str | bytes | None = None, + ) -> bool: ... + def do_acquire(self, token: str | bytes) -> bool: ... + def locked(self) -> bool: ... + def owned(self) -> bool: ... + def release(self) -> None: ... + def do_release(self, expected_token: str | bytes) -> None: ... + def extend(self, additional_time: float, replace_ttl: bool = False) -> bool: ... + def do_extend(self, additional_time: float, replace_ttl: bool) -> bool: ... + def reacquire(self) -> bool: ... + def do_reacquire(self) -> bool: ... diff --git a/valkey/ocsp.pyi b/valkey/ocsp.pyi new file mode 100644 index 00000000..5fc72e08 --- /dev/null +++ b/valkey/ocsp.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete +from ssl import SSLObject, SSLSocket +from typing import Literal + +from cryptography.x509.base import Certificate +from OpenSSL.SSL import Connection + +def ocsp_staple_verifier(con: Connection, ocsp_bytes: bytes, expected: bytes | None = None) -> Literal[True]: ... + +class OCSPVerifier: + SOCK: SSLObject | SSLSocket + HOST: str + PORT: int + CA_CERTS: str | None + def __init__(self, sock: SSLObject | SSLSocket, host: str, port: int, ca_certs: str | None = None) -> None: ... + # cryptography.x509.general_name.GeneralName.value is typed as Any + def components_from_socket(self) -> tuple[Certificate, Incomplete | None, Incomplete]: ... + def components_from_direct_connection(self) -> tuple[Certificate, Incomplete | None, Incomplete]: ... + def build_certificate_url(self, server: str, cert: Certificate, issuer_cert: Certificate) -> str: ... 
+ def check_certificate(self, server: str, cert: Certificate, issuer_url: str | bytes) -> Literal[True]: ... + def is_valid(self) -> Literal[True]: ... diff --git a/valkey/retry.pyi b/valkey/retry.pyi new file mode 100644 index 00000000..ab727e66 --- /dev/null +++ b/valkey/retry.pyi @@ -0,0 +1,11 @@ +from collections.abc import Callable, Iterable +from typing import TypeVar + +from valkey.backoff import AbstractBackoff + +_T = TypeVar("_T") + +class Retry: + def __init__(self, backoff: AbstractBackoff, retries: int, supported_errors: tuple[type[Exception], ...] = ...) -> None: ... + def update_supported_errors(self, specified_errors: Iterable[type[Exception]]) -> None: ... + def call_with_retry(self, do: Callable[[], _T], fail: Callable[[Exception], object]) -> _T: ... diff --git a/valkey/sentinel.pyi b/valkey/sentinel.pyi new file mode 100644 index 00000000..4a4c9489 --- /dev/null +++ b/valkey/sentinel.pyi @@ -0,0 +1,62 @@ +from collections.abc import Iterable, Iterator +from typing import Any, Literal, TypeVar, overload +from typing_extensions import TypeAlias + +from valkey.client import Valkey +from valkey.commands.sentinel import SentinelCommands +from valkey.connection import Connection, ConnectionPool, SSLConnection +from valkey.exceptions import ConnectionError + +_ValkeyT = TypeVar("_ValkeyT", bound=Valkey[Any]) +_AddressAndPort: TypeAlias = tuple[str, int] +_SentinelState: TypeAlias = dict[str, Any] # TODO: this can be a TypedDict + +class MasterNotFoundError(ConnectionError): ... +class SlaveNotFoundError(ConnectionError): ... + +class SentinelManagedConnection(Connection): + connection_pool: SentinelConnectionPool + def __init__(self, *, connection_pool: SentinelConnectionPool, **kwargs) -> None: ... + def connect_to(self, address: _AddressAndPort) -> None: ... + def connect(self) -> None: ... 
+ # The result can be either `str | bytes` or `list[str | bytes]` + def read_response(self, disable_decoding: bool = False, *, disconnect_on_error: bool = False) -> Any: ... + +class SentinelManagedSSLConnection(SentinelManagedConnection, SSLConnection): ... + +class SentinelConnectionPool(ConnectionPool): + is_master: bool + check_connection: bool + service_name: str + sentinel_manager: Sentinel + def __init__(self, service_name: str, sentinel_manager: Sentinel, **kwargs) -> None: ... + def reset(self) -> None: ... + def owns_connection(self, connection: Connection) -> bool: ... + def get_master_address(self) -> _AddressAndPort: ... + def rotate_slaves(self) -> Iterator[_AddressAndPort]: ... + +class Sentinel(SentinelCommands): + sentinel_kwargs: dict[str, Any] + sentinels: list[Valkey[Any]] + min_other_sentinels: int + connection_kwargs: dict[str, Any] + def __init__( + self, + sentinels: Iterable[_AddressAndPort], + min_other_sentinels: int = 0, + sentinel_kwargs: dict[str, Any] | None = None, + **connection_kwargs, + ) -> None: ... + def check_master_state(self, state: _SentinelState, service_name: str) -> bool: ... + def discover_master(self, service_name: str) -> _AddressAndPort: ... + def filter_slaves(self, slaves: Iterable[_SentinelState]) -> list[_AddressAndPort]: ... + def discover_slaves(self, service_name: str) -> list[_AddressAndPort]: ... + @overload + def master_for(self, service_name: str, *, connection_pool_class=..., **kwargs) -> Valkey[Any]: ... + @overload + def master_for(self, service_name: str, valkey_class: type[_ValkeyT], connection_pool_class=..., **kwargs) -> _ValkeyT: ... + @overload + def slave_for(self, service_name: str, *, connection_pool_class=..., **kwargs) -> Valkey[Any]: ... + @overload + def slave_for(self, service_name: str, valkey_class: type[_ValkeyT], connection_pool_class=..., **kwargs) -> _ValkeyT: ... + def execute_command(self, *args, **kwargs) -> Literal[True]: ... 
diff --git a/valkey/typing.pyi b/valkey/typing.pyi new file mode 100644 index 00000000..dce33cb2 --- /dev/null +++ b/valkey/typing.pyi @@ -0,0 +1,34 @@ +from collections.abc import Iterable +from datetime import datetime, timedelta +from typing import Any, Protocol, TypeVar +from typing_extensions import TypeAlias + +from valkey.asyncio.connection import ConnectionPool as AsyncConnectionPool +from valkey.connection import ConnectionPool + +# The following type aliases exist at runtime. +EncodedT: TypeAlias = bytes | memoryview +DecodedT: TypeAlias = str | int | float +EncodableT: TypeAlias = EncodedT | DecodedT +AbsExpiryT: TypeAlias = int | datetime +ExpiryT: TypeAlias = int | timedelta +ZScoreBoundT: TypeAlias = float | str +BitfieldOffsetT: TypeAlias = int | str +_StringLikeT: TypeAlias = bytes | str | memoryview # noqa: Y043 +KeyT: TypeAlias = _StringLikeT +PatternT: TypeAlias = _StringLikeT +FieldT: TypeAlias = EncodableT +KeysT: TypeAlias = KeyT | Iterable[KeyT] +ChannelT: TypeAlias = _StringLikeT +GroupT: TypeAlias = _StringLikeT +ConsumerT: TypeAlias = _StringLikeT +StreamIdT: TypeAlias = int | _StringLikeT +ScriptTextT: TypeAlias = _StringLikeT +TimeoutSecT: TypeAlias = int | float | _StringLikeT +AnyKeyT = TypeVar("AnyKeyT", bytes, str, memoryview) # noqa: Y001 +AnyFieldT = TypeVar("AnyFieldT", bytes, str, memoryview) # noqa: Y001 +AnyChannelT = TypeVar("AnyChannelT", bytes, str, memoryview) # noqa: Y001 + +class CommandsProtocol(Protocol): + connection_pool: AsyncConnectionPool[Any] | ConnectionPool + def execute_command(self, *args, **options): ... 
diff --git a/valkey/utils.pyi b/valkey/utils.pyi new file mode 100644 index 00000000..de41c112 --- /dev/null +++ b/valkey/utils.pyi @@ -0,0 +1,22 @@ +from _typeshed import Unused +from collections.abc import Iterable, Mapping +from contextlib import AbstractContextManager +from typing import Any, Literal, TypeVar, overload + +from .client import Pipeline, Valkey, _StrType + +_T = TypeVar("_T") + +LIBVALKEY_AVAILABLE: bool +CRYPTOGRAPHY_AVAILABLE: bool + +@overload +def from_url(url: str, *, db: int = ..., decode_responses: Literal[True], **kwargs: Any) -> Valkey[str]: ... +@overload +def from_url(url: str, *, db: int = ..., decode_responses: Literal[False] = False, **kwargs: Any) -> Valkey[bytes]: ... +def pipeline(valkey_obj: Valkey[_StrType]) -> AbstractContextManager[Pipeline[_StrType]]: ... +def str_if_bytes(value: str | bytes) -> str: ... +def safe_str(value: object) -> str: ... +def dict_merge(*dicts: Mapping[str, _T]) -> dict[str, _T]: ... +def list_keys_to_dict(key_list, callback): ... # unused, alias for `dict.fromkeys` +def merge_result(command: Unused, res: Mapping[Any, Iterable[_T]]) -> list[_T]: ...