diff --git a/.github/wordlist.txt b/.github/wordlist.txt
index 3ea543748e..29bcaa9d77 100644
--- a/.github/wordlist.txt
+++ b/.github/wordlist.txt
@@ -12,7 +12,6 @@ ConnectionPool
 CoreCommands
 EVAL
 EVALSHA
-GraphCommands
 Grokzen's
 INCR
 IOError
@@ -39,7 +38,6 @@ RedisCluster
 RedisClusterCommands
 RedisClusterException
 RedisClusters
-RedisGraph
 RedisInstrumentor
 RedisJSON
 RedisTimeSeries
diff --git a/.github/workflows/install_and_test.sh b/.github/workflows/install_and_test.sh
index 778dbe0b20..e647126539 100755
--- a/.github/workflows/install_and_test.sh
+++ b/.github/workflows/install_and_test.sh
@@ -40,9 +40,9 @@ cd ${TESTDIR}
 # install, run tests
 pip install ${PKG}
 # Redis tests
-pytest -m 'not onlycluster and not graph'
+pytest -m 'not onlycluster'
 # RedisCluster tests
 CLUSTER_URL="redis://localhost:16379/0"
 CLUSTER_SSL_URL="rediss://localhost:27379/0"
-pytest -m 'not onlynoncluster and not redismod and not ssl and not graph' \
+pytest -m 'not onlynoncluster and not redismod and not ssl' \
   --redis-url="${CLUSTER_URL}" --redis-ssl-url="${CLUSTER_SSL_URL}"
diff --git a/CHANGES b/CHANGES
index 031d909f23..24b52c54db 100644
--- a/CHANGES
+++ b/CHANGES
@@ -1,3 +1,4 @@
+    * Remove support for the RedisGraph module. RedisGraph support has been deprecated since Redis Stack 7.2 (https://redis.com/blog/redisgraph-eol/)
     * Fix lock.extend() typedef to accept float TTL extension
     * Update URL in the readme linking to Redis University
     * Move doctests (doc code examples) to main branch
diff --git a/docker-compose.yml b/docker-compose.yml
index 8ca3471311..76a60398f3 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -105,11 +105,3 @@ services:
       - standalone
       - all-stack
       - all
-
-  redis-stack-graph:
-    image: redis/redis-stack-server:6.2.6-v15
-    container_name: redis-stack-graph
-    ports:
-      - 6480:6379
-    profiles:
-      - graph
diff --git a/docs/redismodules.rst b/docs/redismodules.rst
index 27757cb692..07914fff12 100644
--- a/docs/redismodules.rst
+++ b/docs/redismodules.rst
@@ -51,39 +51,6 @@ These are the commands for interacting with the `RedisBloom module `_. Below is a brief example, as well as documentation on the commands themselves.
-
-**Create a graph, adding two nodes**
-
-.. code-block:: python
-
-    import redis
-    from redis.graph.node import Node
-
-    john = Node(label="person", properties={"name": "John Doe", "age": 33}
-    jane = Node(label="person", properties={"name": "Jane Doe", "age": 34}
-
-    r = redis.Redis()
-    graph = r.graph()
-    graph.add_node(john)
-    graph.add_node(jane)
-    graph.add_node(pat)
-    graph.commit()
-
-.. automodule:: redis.commands.graph.node
-    :members: Node
-
-.. automodule:: redis.commands.graph.edge
-    :members: Edge
-
-..
automodule:: redis.commands.graph.commands - :members: GraphCommands - ------- - RedisJSON Commands ****************** diff --git a/pyproject.toml b/pyproject.toml index 9c868be4b7..ab3e4cd77e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,14 +9,8 @@ description = "Python client for Redis database and key-value store" readme = "README.md" license = "MIT" requires-python = ">=3.8" -authors = [ - { name = "Redis Inc.", email = "oss@redis.com" }, -] -keywords = [ - "Redis", - "database", - "key-value-store", -] +authors = [{ name = "Redis Inc.", email = "oss@redis.com" }] +keywords = ["Redis", "database", "key-value-store"] classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: Console", @@ -35,9 +29,7 @@ classifiers = [ "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", ] -dependencies = [ - 'async-timeout>=4.0.3; python_full_version<"3.11.3"', -] +dependencies = ['async-timeout>=4.0.3; python_full_version<"3.11.3"'] [project.optional-dependencies] hiredis = [ @@ -63,22 +55,15 @@ Homepage = "https://github.com/redis/redis-py" path = "redis/__init__.py" [tool.hatch.build.targets.sdist] -include = [ - "/redis", - "/tests", - "dev_requirements.txt", -] +include = ["/redis", "/tests", "dev_requirements.txt"] [tool.hatch.build.targets.wheel] -include = [ - "/redis", -] +include = ["/redis"] [tool.pytest.ini_options] addopts = "-s" markers = [ "redismod: run only the redis module tests", - "graph: run only the redisgraph tests", "pipeline: pipeline tests", "onlycluster: marks tests to be run only with cluster mode redis", "onlynoncluster: marks tests to be run only with standalone redis", @@ -93,7 +78,6 @@ asyncio_mode = "auto" timeout = 30 filterwarnings = [ "always", - "ignore:RedisGraph support is deprecated as of Redis Stack 7.2:DeprecationWarning", # Ignore a coverage warning when COVERAGE_CORE=sysmon for Pythons < 3.12. "ignore:sys.monitoring isn't available:coverage.exceptions.CoverageWarning", ] @@ -118,32 +102,23 @@ exclude = [ [tool.ruff.lint] ignore = [ - "E501", # line too long (taken care of with ruff format) - "E741", # ambiguous variable name - "N818", # Errors should have Error suffix + "E501", # line too long (taken care of with ruff format) + "E741", # ambiguous variable name + "N818", # Errors should have Error suffix ] -select = [ - "E", - "F", - "FLY", - "I", - "N", - "W", -] +select = ["E", "F", "FLY", "I", "N", "W"] [tool.ruff.lint.per-file-ignores] "redis/commands/bf/*" = [ # the `bf` module uses star imports, so this is required there. 
- "F405", # name may be undefined, or defined from star imports -] -"redis/commands/{bf,timeseries,json,search}/*" = [ - "N", + "F405", # name may be undefined, or defined from star imports ] +"redis/commands/{bf,timeseries,json,search}/*" = ["N"] "tests/*" = [ - "I", # TODO: could be enabled, plenty of changes - "N801", # class name should use CapWords convention - "N803", # argument name should be lowercase - "N802", # function name should be lowercase - "N806", # variable name should be lowercase + "I", # TODO: could be enabled, plenty of changes + "N801", # class name should use CapWords convention + "N803", # argument name should be lowercase + "N802", # function name should be lowercase + "N806", # variable name should be lowercase ] diff --git a/redis/cluster.py b/redis/cluster.py index c9523e2a76..13253ec896 100644 --- a/redis/cluster.py +++ b/redis/cluster.py @@ -288,7 +288,6 @@ class AbstractRedisCluster: "TFUNCTION LIST", "TFCALL", "TFCALLASYNC", - "GRAPH.CONFIG", "LATENCY HISTORY", "LATENCY LATEST", "LATENCY RESET", diff --git a/redis/commands/graph/__init__.py b/redis/commands/graph/__init__.py deleted file mode 100644 index ddc0e34f4c..0000000000 --- a/redis/commands/graph/__init__.py +++ /dev/null @@ -1,263 +0,0 @@ -import warnings - -from ..helpers import quote_string, random_string, stringify_param_value -from .commands import AsyncGraphCommands, GraphCommands -from .edge import Edge # noqa -from .node import Node # noqa -from .path import Path # noqa - -DB_LABELS = "DB.LABELS" -DB_RAELATIONSHIPTYPES = "DB.RELATIONSHIPTYPES" -DB_PROPERTYKEYS = "DB.PROPERTYKEYS" - - -class Graph(GraphCommands): - """ - Graph, collection of nodes and edges. - """ - - def __init__(self, client, name=random_string()): - """ - Create a new graph. - """ - warnings.warn( - DeprecationWarning( - "RedisGraph support is deprecated as of Redis Stack 7.2 \ - (https://redis.com/blog/redisgraph-eol/)" - ) - ) - self.NAME = name # Graph key - self.client = client - self.execute_command = client.execute_command - - self.nodes = {} - self.edges = [] - self._labels = [] # List of node labels. - self._properties = [] # List of properties. - self._relationship_types = [] # List of relation types. - self.version = 0 # Graph version - - @property - def name(self): - return self.NAME - - def _clear_schema(self): - self._labels = [] - self._properties = [] - self._relationship_types = [] - - def _refresh_schema(self): - self._clear_schema() - self._refresh_labels() - self._refresh_relations() - self._refresh_attributes() - - def _refresh_labels(self): - lbls = self.labels() - - # Unpack data. - self._labels = [l[0] for _, l in enumerate(lbls)] - - def _refresh_relations(self): - rels = self.relationship_types() - - # Unpack data. - self._relationship_types = [r[0] for _, r in enumerate(rels)] - - def _refresh_attributes(self): - props = self.property_keys() - - # Unpack data. - self._properties = [p[0] for _, p in enumerate(props)] - - def get_label(self, idx): - """ - Returns a label by it's index - - Args: - - idx: - The index of the label - """ - try: - label = self._labels[idx] - except IndexError: - # Refresh labels. - self._refresh_labels() - label = self._labels[idx] - return label - - def get_relation(self, idx): - """ - Returns a relationship type by it's index - - Args: - - idx: - The index of the relation - """ - try: - relationship_type = self._relationship_types[idx] - except IndexError: - # Refresh relationship types. 
- self._refresh_relations() - relationship_type = self._relationship_types[idx] - return relationship_type - - def get_property(self, idx): - """ - Returns a property by it's index - - Args: - - idx: - The index of the property - """ - try: - p = self._properties[idx] - except IndexError: - # Refresh properties. - self._refresh_attributes() - p = self._properties[idx] - return p - - def add_node(self, node): - """ - Adds a node to the graph. - """ - if node.alias is None: - node.alias = random_string() - self.nodes[node.alias] = node - - def add_edge(self, edge): - """ - Adds an edge to the graph. - """ - if not (self.nodes[edge.src_node.alias] and self.nodes[edge.dest_node.alias]): - raise AssertionError("Both edge's end must be in the graph") - - self.edges.append(edge) - - def _build_params_header(self, params): - if params is None: - return "" - if not isinstance(params, dict): - raise TypeError("'params' must be a dict") - # Header starts with "CYPHER" - params_header = "CYPHER " - for key, value in params.items(): - params_header += str(key) + "=" + stringify_param_value(value) + " " - return params_header - - # Procedures. - def call_procedure(self, procedure, *args, read_only=False, **kwagrs): - args = [quote_string(arg) for arg in args] - q = f"CALL {procedure}({','.join(args)})" - - y = kwagrs.get("y", None) - if y is not None: - q += f"YIELD {','.join(y)}" - - return self.query(q, read_only=read_only) - - def labels(self): - return self.call_procedure(DB_LABELS, read_only=True).result_set - - def relationship_types(self): - return self.call_procedure(DB_RAELATIONSHIPTYPES, read_only=True).result_set - - def property_keys(self): - return self.call_procedure(DB_PROPERTYKEYS, read_only=True).result_set - - -class AsyncGraph(Graph, AsyncGraphCommands): - """Async version for Graph""" - - async def _refresh_labels(self): - lbls = await self.labels() - - # Unpack data. - self._labels = [l[0] for _, l in enumerate(lbls)] - - async def _refresh_attributes(self): - props = await self.property_keys() - - # Unpack data. - self._properties = [p[0] for _, p in enumerate(props)] - - async def _refresh_relations(self): - rels = await self.relationship_types() - - # Unpack data. - self._relationship_types = [r[0] for _, r in enumerate(rels)] - - async def get_label(self, idx): - """ - Returns a label by it's index - - Args: - - idx: - The index of the label - """ - try: - label = self._labels[idx] - except IndexError: - # Refresh labels. - await self._refresh_labels() - label = self._labels[idx] - return label - - async def get_property(self, idx): - """ - Returns a property by it's index - - Args: - - idx: - The index of the property - """ - try: - p = self._properties[idx] - except IndexError: - # Refresh properties. - await self._refresh_attributes() - p = self._properties[idx] - return p - - async def get_relation(self, idx): - """ - Returns a relationship type by it's index - - Args: - - idx: - The index of the relation - """ - try: - relationship_type = self._relationship_types[idx] - except IndexError: - # Refresh relationship types. 
- await self._refresh_relations() - relationship_type = self._relationship_types[idx] - return relationship_type - - async def call_procedure(self, procedure, *args, read_only=False, **kwagrs): - args = [quote_string(arg) for arg in args] - q = f"CALL {procedure}({','.join(args)})" - - y = kwagrs.get("y", None) - if y is not None: - f"YIELD {','.join(y)}" - return await self.query(q, read_only=read_only) - - async def labels(self): - return (await self.call_procedure(DB_LABELS, read_only=True)).result_set - - async def property_keys(self): - return (await self.call_procedure(DB_PROPERTYKEYS, read_only=True)).result_set - - async def relationship_types(self): - return ( - await self.call_procedure(DB_RAELATIONSHIPTYPES, read_only=True) - ).result_set diff --git a/redis/commands/graph/commands.py b/redis/commands/graph/commands.py deleted file mode 100644 index 1e41a5fb1f..0000000000 --- a/redis/commands/graph/commands.py +++ /dev/null @@ -1,311 +0,0 @@ -from redis import DataError -from redis.exceptions import ResponseError - -from .exceptions import VersionMismatchException -from .execution_plan import ExecutionPlan -from .query_result import AsyncQueryResult, QueryResult - -PROFILE_CMD = "GRAPH.PROFILE" -RO_QUERY_CMD = "GRAPH.RO_QUERY" -QUERY_CMD = "GRAPH.QUERY" -DELETE_CMD = "GRAPH.DELETE" -SLOWLOG_CMD = "GRAPH.SLOWLOG" -CONFIG_CMD = "GRAPH.CONFIG" -LIST_CMD = "GRAPH.LIST" -EXPLAIN_CMD = "GRAPH.EXPLAIN" - - -class GraphCommands: - """RedisGraph Commands""" - - def commit(self): - """ - Create entire graph. - """ - if len(self.nodes) == 0 and len(self.edges) == 0: - return None - - query = "CREATE " - for _, node in self.nodes.items(): - query += str(node) + "," - - query += ",".join([str(edge) for edge in self.edges]) - - # Discard leading comma. - if query[-1] == ",": - query = query[:-1] - - return self.query(query) - - def query(self, q, params=None, timeout=None, read_only=False, profile=False): - """ - Executes a query against the graph. - For more information see `GRAPH.QUERY `_. # noqa - - Args: - - q : str - The query. - params : dict - Query parameters. - timeout : int - Maximum runtime for read queries in milliseconds. - read_only : bool - Executes a readonly query if set to True. - profile : bool - Return details on results produced by and time - spent in each operation. - """ - - # maintain original 'q' - query = q - - # handle query parameters - query = self._build_params_header(params) + query - - # construct query command - # ask for compact result-set format - # specify known graph version - if profile: - cmd = PROFILE_CMD - else: - cmd = RO_QUERY_CMD if read_only else QUERY_CMD - command = [cmd, self.name, query, "--compact"] - - # include timeout is specified - if isinstance(timeout, int): - command.extend(["timeout", timeout]) - elif timeout is not None: - raise Exception("Timeout argument must be a positive integer") - - # issue query - try: - response = self.execute_command(*command) - return QueryResult(self, response, profile) - except ResponseError as e: - if "unknown command" in str(e) and read_only: - # `GRAPH.RO_QUERY` is unavailable in older versions. - return self.query(q, params, timeout, read_only=False) - raise e - except VersionMismatchException as e: - # client view over the graph schema is out of sync - # set client version and refresh local schema - self.version = e.version - self._refresh_schema() - # re-issue query - return self.query(q, params, timeout, read_only) - - def merge(self, pattern): - """ - Merge pattern. 
- """ - query = "MERGE " - query += str(pattern) - - return self.query(query) - - def delete(self): - """ - Deletes graph. - For more information see `DELETE `_. # noqa - """ - self._clear_schema() - return self.execute_command(DELETE_CMD, self.name) - - # declared here, to override the built in redis.db.flush() - def flush(self): - """ - Commit the graph and reset the edges and the nodes to zero length. - """ - self.commit() - self.nodes = {} - self.edges = [] - - def bulk(self, **kwargs): - """Internal only. Not supported.""" - raise NotImplementedError( - "GRAPH.BULK is internal only. " - "Use https://github.com/redisgraph/redisgraph-bulk-loader." - ) - - def profile(self, query): - """ - Execute a query and produce an execution plan augmented with metrics - for each operation's execution. Return a string representation of a - query execution plan, with details on results produced by and time - spent in each operation. - For more information see `GRAPH.PROFILE `_. # noqa - """ - return self.query(query, profile=True) - - def slowlog(self): - """ - Get a list containing up to 10 of the slowest queries issued - against the given graph ID. - For more information see `GRAPH.SLOWLOG `_. # noqa - - Each item in the list has the following structure: - 1. A unix timestamp at which the log entry was processed. - 2. The issued command. - 3. The issued query. - 4. The amount of time needed for its execution, in milliseconds. - """ - return self.execute_command(SLOWLOG_CMD, self.name) - - def config(self, name, value=None, set=False): - """ - Retrieve or update a RedisGraph configuration. - For more information see ``__. - - Args: - - name : str - The name of the configuration - value : - The value we want to set (can be used only when `set` is on) - set : bool - Turn on to set a configuration. Default behavior is get. - """ - params = ["SET" if set else "GET", name] - if value is not None: - if set: - params.append(value) - else: - raise DataError("``value`` can be provided only when ``set`` is True") # noqa - return self.execute_command(CONFIG_CMD, *params) - - def list_keys(self): - """ - Lists all graph keys in the keyspace. - For more information see `GRAPH.LIST `_. # noqa - """ - return self.execute_command(LIST_CMD) - - def execution_plan(self, query, params=None): - """ - Get the execution plan for given query, - GRAPH.EXPLAIN returns an array of operations. - - Args: - query: the query that will be executed - params: query parameters - """ - query = self._build_params_header(params) + query - - plan = self.execute_command(EXPLAIN_CMD, self.name, query) - if isinstance(plan[0], bytes): - plan = [b.decode() for b in plan] - return "\n".join(plan) - - def explain(self, query, params=None): - """ - Get the execution plan for given query, - GRAPH.EXPLAIN returns ExecutionPlan object. - For more information see `GRAPH.EXPLAIN `_. # noqa - - Args: - query: the query that will be executed - params: query parameters - """ - query = self._build_params_header(params) + query - - plan = self.execute_command(EXPLAIN_CMD, self.name, query) - return ExecutionPlan(plan) - - -class AsyncGraphCommands(GraphCommands): - async def query(self, q, params=None, timeout=None, read_only=False, profile=False): - """ - Executes a query against the graph. - For more information see `GRAPH.QUERY `_. # noqa - - Args: - - q : str - The query. - params : dict - Query parameters. - timeout : int - Maximum runtime for read queries in milliseconds. - read_only : bool - Executes a readonly query if set to True. 
- profile : bool - Return details on results produced by and time - spent in each operation. - """ - - # maintain original 'q' - query = q - - # handle query parameters - query = self._build_params_header(params) + query - - # construct query command - # ask for compact result-set format - # specify known graph version - if profile: - cmd = PROFILE_CMD - else: - cmd = RO_QUERY_CMD if read_only else QUERY_CMD - command = [cmd, self.name, query, "--compact"] - - # include timeout is specified - if isinstance(timeout, int): - command.extend(["timeout", timeout]) - elif timeout is not None: - raise Exception("Timeout argument must be a positive integer") - - # issue query - try: - response = await self.execute_command(*command) - return await AsyncQueryResult().initialize(self, response, profile) - except ResponseError as e: - if "unknown command" in str(e) and read_only: - # `GRAPH.RO_QUERY` is unavailable in older versions. - return await self.query(q, params, timeout, read_only=False) - raise e - except VersionMismatchException as e: - # client view over the graph schema is out of sync - # set client version and refresh local schema - self.version = e.version - self._refresh_schema() - # re-issue query - return await self.query(q, params, timeout, read_only) - - async def execution_plan(self, query, params=None): - """ - Get the execution plan for given query, - GRAPH.EXPLAIN returns an array of operations. - - Args: - query: the query that will be executed - params: query parameters - """ - query = self._build_params_header(params) + query - - plan = await self.execute_command(EXPLAIN_CMD, self.name, query) - if isinstance(plan[0], bytes): - plan = [b.decode() for b in plan] - return "\n".join(plan) - - async def explain(self, query, params=None): - """ - Get the execution plan for given query, - GRAPH.EXPLAIN returns ExecutionPlan object. - - Args: - query: the query that will be executed - params: query parameters - """ - query = self._build_params_header(params) + query - - plan = await self.execute_command(EXPLAIN_CMD, self.name, query) - return ExecutionPlan(plan) - - async def flush(self): - """ - Commit the graph and reset the edges and the nodes to zero length. - """ - await self.commit() - self.nodes = {} - self.edges = [] diff --git a/redis/commands/graph/edge.py b/redis/commands/graph/edge.py deleted file mode 100644 index 6ee195f1f5..0000000000 --- a/redis/commands/graph/edge.py +++ /dev/null @@ -1,91 +0,0 @@ -from ..helpers import quote_string -from .node import Node - - -class Edge: - """ - An edge connecting two nodes. - """ - - def __init__(self, src_node, relation, dest_node, edge_id=None, properties=None): - """ - Create a new edge. - """ - if src_node is None or dest_node is None: - # NOTE(bors-42): It makes sense to change AssertionError to - # ValueError here - raise AssertionError("Both src_node & dest_node must be provided") - - self.id = edge_id - self.relation = relation or "" - self.properties = properties or {} - self.src_node = src_node - self.dest_node = dest_node - - def to_string(self): - res = "" - if self.properties: - props = ",".join( - key + ":" + str(quote_string(val)) - for key, val in sorted(self.properties.items()) - ) - res += "{" + props + "}" - - return res - - def __str__(self): - # Source node. 
- if isinstance(self.src_node, Node): - res = str(self.src_node) - else: - res = "()" - - # Edge - res += "-[" - if self.relation: - res += ":" + self.relation - if self.properties: - props = ",".join( - key + ":" + str(quote_string(val)) - for key, val in sorted(self.properties.items()) - ) - res += "{" + props + "}" - res += "]->" - - # Dest node. - if isinstance(self.dest_node, Node): - res += str(self.dest_node) - else: - res += "()" - - return res - - def __eq__(self, rhs): - # Type checking - if not isinstance(rhs, Edge): - return False - - # Quick positive check, if both IDs are set. - if self.id is not None and rhs.id is not None and self.id == rhs.id: - return True - - # Source and destination nodes should match. - if self.src_node != rhs.src_node: - return False - - if self.dest_node != rhs.dest_node: - return False - - # Relation should match. - if self.relation != rhs.relation: - return False - - # Quick check for number of properties. - if len(self.properties) != len(rhs.properties): - return False - - # Compare properties. - if self.properties != rhs.properties: - return False - - return True diff --git a/redis/commands/graph/exceptions.py b/redis/commands/graph/exceptions.py deleted file mode 100644 index 4bbac1008e..0000000000 --- a/redis/commands/graph/exceptions.py +++ /dev/null @@ -1,3 +0,0 @@ -class VersionMismatchException(Exception): - def __init__(self, version): - self.version = version diff --git a/redis/commands/graph/execution_plan.py b/redis/commands/graph/execution_plan.py deleted file mode 100644 index 179a80cca0..0000000000 --- a/redis/commands/graph/execution_plan.py +++ /dev/null @@ -1,211 +0,0 @@ -import re - - -class ProfileStats: - """ - ProfileStats, runtime execution statistics of operation. - """ - - def __init__(self, records_produced, execution_time): - self.records_produced = records_produced - self.execution_time = execution_time - - -class Operation: - """ - Operation, single operation within execution plan. - """ - - def __init__(self, name, args=None, profile_stats=None): - """ - Create a new operation. - - Args: - name: string that represents the name of the operation - args: operation arguments - profile_stats: profile statistics - """ - self.name = name - self.args = args - self.profile_stats = profile_stats - self.children = [] - - def append_child(self, child): - if not isinstance(child, Operation) or self is child: - raise Exception("child must be Operation") - - self.children.append(child) - return self - - def child_count(self): - return len(self.children) - - def __eq__(self, o: object) -> bool: - if not isinstance(o, Operation): - return False - - return self.name == o.name and self.args == o.args - - def __str__(self) -> str: - args_str = "" if self.args is None else " | " + self.args - return f"{self.name}{args_str}" - - -class ExecutionPlan: - """ - ExecutionPlan, collection of operations. - """ - - def __init__(self, plan): - """ - Create a new execution plan. 
- - Args: - plan: array of strings that represents the collection operations - the output from GRAPH.EXPLAIN - """ - if not isinstance(plan, list): - raise Exception("plan must be an array") - - if isinstance(plan[0], bytes): - plan = [b.decode() for b in plan] - - self.plan = plan - self.structured_plan = self._operation_tree() - - def _compare_operations(self, root_a, root_b): - """ - Compare execution plan operation tree - - Return: True if operation trees are equal, False otherwise - """ - - # compare current root - if root_a != root_b: - return False - - # make sure root have the same number of children - if root_a.child_count() != root_b.child_count(): - return False - - # recursively compare children - for i in range(root_a.child_count()): - if not self._compare_operations(root_a.children[i], root_b.children[i]): - return False - - return True - - def __str__(self) -> str: - def aggraget_str(str_children): - return "\n".join( - [ - " " + line - for str_child in str_children - for line in str_child.splitlines() - ] - ) - - def combine_str(x, y): - return f"{x}\n{y}" - - return self._operation_traverse( - self.structured_plan, str, aggraget_str, combine_str - ) - - def __eq__(self, o: object) -> bool: - """Compares two execution plans - - Return: True if the two plans are equal False otherwise - """ - # make sure 'o' is an execution-plan - if not isinstance(o, ExecutionPlan): - return False - - # get root for both plans - root_a = self.structured_plan - root_b = o.structured_plan - - # compare execution trees - return self._compare_operations(root_a, root_b) - - def _operation_traverse(self, op, op_f, aggregate_f, combine_f): - """ - Traverse operation tree recursively applying functions - - Args: - op: operation to traverse - op_f: function applied for each operation - aggregate_f: aggregation function applied for all children of a single operation - combine_f: combine function applied for the operation result and the children result - """ # noqa - # apply op_f for each operation - op_res = op_f(op) - if len(op.children) == 0: - return op_res # no children return - else: - # apply _operation_traverse recursively - children = [ - self._operation_traverse(child, op_f, aggregate_f, combine_f) - for child in op.children - ] - # combine the operation result with the children aggregated result - return combine_f(op_res, aggregate_f(children)) - - def _operation_tree(self): - """Build the operation tree from the string representation""" - - # initial state - i = 0 - level = 0 - stack = [] - current = None - - def _create_operation(args): - profile_stats = None - name = args[0].strip() - args.pop(0) - if len(args) > 0 and "Records produced" in args[-1]: - records_produced = int( - re.search("Records produced: (\\d+)", args[-1]).group(1) - ) - execution_time = float( - re.search("Execution time: (\\d+.\\d+) ms", args[-1]).group(1) - ) - profile_stats = ProfileStats(records_produced, execution_time) - args.pop(-1) - return Operation( - name, None if len(args) == 0 else args[0].strip(), profile_stats - ) - - # iterate plan operations - while i < len(self.plan): - current_op = self.plan[i] - op_level = current_op.count(" ") - if op_level == level: - # if the operation level equal to the current level - # set the current operation and move next - child = _create_operation(current_op.split("|")) - if current: - current = stack.pop() - current.append_child(child) - current = child - i += 1 - elif op_level == level + 1: - # if the operation is child of the current operation - # add it as child and 
set as current operation - child = _create_operation(current_op.split("|")) - current.append_child(child) - stack.append(current) - current = child - level += 1 - i += 1 - elif op_level < level: - # if the operation is not child of current operation - # go back to it's parent operation - levels_back = level - op_level + 1 - for _ in range(levels_back): - current = stack.pop() - level -= levels_back - else: - raise Exception("corrupted plan") - return stack[0] diff --git a/redis/commands/graph/node.py b/redis/commands/graph/node.py deleted file mode 100644 index 4546a393b1..0000000000 --- a/redis/commands/graph/node.py +++ /dev/null @@ -1,88 +0,0 @@ -from ..helpers import quote_string - - -class Node: - """ - A node within the graph. - """ - - def __init__(self, node_id=None, alias=None, label=None, properties=None): - """ - Create a new node. - """ - self.id = node_id - self.alias = alias - if isinstance(label, list): - label = [inner_label for inner_label in label if inner_label != ""] - - if ( - label is None - or label == "" - or (isinstance(label, list) and len(label) == 0) - ): - self.label = None - self.labels = None - elif isinstance(label, str): - self.label = label - self.labels = [label] - elif isinstance(label, list) and all( - [isinstance(inner_label, str) for inner_label in label] - ): - self.label = label[0] - self.labels = label - else: - raise AssertionError( - "label should be either None, string or a list of strings" - ) - - self.properties = properties or {} - - def to_string(self): - res = "" - if self.properties: - props = ",".join( - key + ":" + str(quote_string(val)) - for key, val in sorted(self.properties.items()) - ) - res += "{" + props + "}" - - return res - - def __str__(self): - res = "(" - if self.alias: - res += self.alias - if self.labels: - res += ":" + ":".join(self.labels) - if self.properties: - props = ",".join( - key + ":" + str(quote_string(val)) - for key, val in sorted(self.properties.items()) - ) - res += "{" + props + "}" - res += ")" - - return res - - def __eq__(self, rhs): - # Type checking - if not isinstance(rhs, Node): - return False - - # Quick positive check, if both IDs are set. - if self.id is not None and rhs.id is not None and self.id != rhs.id: - return False - - # Label should match. - if self.label != rhs.label: - return False - - # Quick check for number of properties. - if len(self.properties) != len(rhs.properties): - return False - - # Compare properties. 
- if self.properties != rhs.properties: - return False - - return True diff --git a/redis/commands/graph/path.py b/redis/commands/graph/path.py deleted file mode 100644 index ee22dc8c6b..0000000000 --- a/redis/commands/graph/path.py +++ /dev/null @@ -1,78 +0,0 @@ -from .edge import Edge -from .node import Node - - -class Path: - def __init__(self, nodes, edges): - if not (isinstance(nodes, list) and isinstance(edges, list)): - raise TypeError("nodes and edges must be list") - - self._nodes = nodes - self._edges = edges - self.append_type = Node - - @classmethod - def new_empty_path(cls): - return cls([], []) - - def nodes(self): - return self._nodes - - def edges(self): - return self._edges - - def get_node(self, index): - return self._nodes[index] - - def get_relationship(self, index): - return self._edges[index] - - def first_node(self): - return self._nodes[0] - - def last_node(self): - return self._nodes[-1] - - def edge_count(self): - return len(self._edges) - - def nodes_count(self): - return len(self._nodes) - - def add_node(self, node): - if not isinstance(node, self.append_type): - raise AssertionError("Add Edge before adding Node") - self._nodes.append(node) - self.append_type = Edge - return self - - def add_edge(self, edge): - if not isinstance(edge, self.append_type): - raise AssertionError("Add Node before adding Edge") - self._edges.append(edge) - self.append_type = Node - return self - - def __eq__(self, other): - # Type checking - if not isinstance(other, Path): - return False - - return self.nodes() == other.nodes() and self.edges() == other.edges() - - def __str__(self): - res = "<" - edge_count = self.edge_count() - for i in range(0, edge_count): - node_id = self.get_node(i).id - res += "(" + str(node_id) + ")" - edge = self.get_relationship(i) - res += ( - "-[" + str(int(edge.id)) + "]->" - if edge.src_node == node_id - else "<-[" + str(int(edge.id)) + "]-" - ) - node_id = self.get_node(edge_count).id - res += "(" + str(node_id) + ")" - res += ">" - return res diff --git a/redis/commands/graph/query_result.py b/redis/commands/graph/query_result.py deleted file mode 100644 index 7709081bcf..0000000000 --- a/redis/commands/graph/query_result.py +++ /dev/null @@ -1,588 +0,0 @@ -import sys -from collections import OrderedDict - -# from prettytable import PrettyTable -from redis import ResponseError - -from .edge import Edge -from .exceptions import VersionMismatchException -from .node import Node -from .path import Path - -LABELS_ADDED = "Labels added" -LABELS_REMOVED = "Labels removed" -NODES_CREATED = "Nodes created" -NODES_DELETED = "Nodes deleted" -RELATIONSHIPS_DELETED = "Relationships deleted" -PROPERTIES_SET = "Properties set" -PROPERTIES_REMOVED = "Properties removed" -RELATIONSHIPS_CREATED = "Relationships created" -INDICES_CREATED = "Indices created" -INDICES_DELETED = "Indices deleted" -CACHED_EXECUTION = "Cached execution" -INTERNAL_EXECUTION_TIME = "internal execution time" - -STATS = [ - LABELS_ADDED, - LABELS_REMOVED, - NODES_CREATED, - PROPERTIES_SET, - PROPERTIES_REMOVED, - RELATIONSHIPS_CREATED, - NODES_DELETED, - RELATIONSHIPS_DELETED, - INDICES_CREATED, - INDICES_DELETED, - CACHED_EXECUTION, - INTERNAL_EXECUTION_TIME, -] - - -class ResultSetColumnTypes: - COLUMN_UNKNOWN = 0 - COLUMN_SCALAR = 1 - COLUMN_NODE = 2 # Unused as of RedisGraph v2.1.0, retained for backwards compatibility. # noqa - COLUMN_RELATION = 3 # Unused as of RedisGraph v2.1.0, retained for backwards compatibility. 
# noqa - - -class ResultSetScalarTypes: - VALUE_UNKNOWN = 0 - VALUE_NULL = 1 - VALUE_STRING = 2 - VALUE_INTEGER = 3 - VALUE_BOOLEAN = 4 - VALUE_DOUBLE = 5 - VALUE_ARRAY = 6 - VALUE_EDGE = 7 - VALUE_NODE = 8 - VALUE_PATH = 9 - VALUE_MAP = 10 - VALUE_POINT = 11 - - -class QueryResult: - def __init__(self, graph, response, profile=False): - """ - A class that represents a result of the query operation. - - Args: - - graph: - The graph on which the query was executed. - response: - The response from the server. - profile: - A boolean indicating if the query command was "GRAPH.PROFILE" - """ - self.graph = graph - self.header = [] - self.result_set = [] - - # in case of an error an exception will be raised - self._check_for_errors(response) - - if len(response) == 1: - self.parse_statistics(response[0]) - elif profile: - self.parse_profile(response) - else: - # start by parsing statistics, matches the one we have - self.parse_statistics(response[-1]) # Last element. - self.parse_results(response) - - def _check_for_errors(self, response): - """ - Check if the response contains an error. - """ - if isinstance(response[0], ResponseError): - error = response[0] - if str(error) == "version mismatch": - version = response[1] - error = VersionMismatchException(version) - raise error - - # If we encountered a run-time error, the last response - # element will be an exception - if isinstance(response[-1], ResponseError): - raise response[-1] - - def parse_results(self, raw_result_set): - """ - Parse the query execution result returned from the server. - """ - self.header = self.parse_header(raw_result_set) - - # Empty header. - if len(self.header) == 0: - return - - self.result_set = self.parse_records(raw_result_set) - - def parse_statistics(self, raw_statistics): - """ - Parse the statistics returned in the response. - """ - self.statistics = {} - - # decode statistics - for idx, stat in enumerate(raw_statistics): - if isinstance(stat, bytes): - raw_statistics[idx] = stat.decode() - - for s in STATS: - v = self._get_value(s, raw_statistics) - if v is not None: - self.statistics[s] = v - - def parse_header(self, raw_result_set): - """ - Parse the header of the result. - """ - # An array of column name/column type pairs. - header = raw_result_set[0] - return header - - def parse_records(self, raw_result_set): - """ - Parses the result set and returns a list of records. - """ - records = [ - [ - self.parse_record_types[self.header[idx][0]](cell) - for idx, cell in enumerate(row) - ] - for row in raw_result_set[1] - ] - - return records - - def parse_entity_properties(self, props): - """ - Parse node / edge properties. - """ - # [[name, value type, value] X N] - properties = {} - for prop in props: - prop_name = self.graph.get_property(prop[0]) - prop_value = self.parse_scalar(prop[1:]) - properties[prop_name] = prop_value - - return properties - - def parse_string(self, cell): - """ - Parse the cell as a string. - """ - if isinstance(cell, bytes): - return cell.decode() - elif not isinstance(cell, str): - return str(cell) - else: - return cell - - def parse_node(self, cell): - """ - Parse the cell to a node. 
- """ - # Node ID (integer), - # [label string offset (integer)], - # [[name, value type, value] X N] - - node_id = int(cell[0]) - labels = None - if len(cell[1]) > 0: - labels = [] - for inner_label in cell[1]: - labels.append(self.graph.get_label(inner_label)) - properties = self.parse_entity_properties(cell[2]) - return Node(node_id=node_id, label=labels, properties=properties) - - def parse_edge(self, cell): - """ - Parse the cell to an edge. - """ - # Edge ID (integer), - # reltype string offset (integer), - # src node ID offset (integer), - # dest node ID offset (integer), - # [[name, value, value type] X N] - - edge_id = int(cell[0]) - relation = self.graph.get_relation(cell[1]) - src_node_id = int(cell[2]) - dest_node_id = int(cell[3]) - properties = self.parse_entity_properties(cell[4]) - return Edge( - src_node_id, relation, dest_node_id, edge_id=edge_id, properties=properties - ) - - def parse_path(self, cell): - """ - Parse the cell to a path. - """ - nodes = self.parse_scalar(cell[0]) - edges = self.parse_scalar(cell[1]) - return Path(nodes, edges) - - def parse_map(self, cell): - """ - Parse the cell as a map. - """ - m = OrderedDict() - n_entries = len(cell) - - # A map is an array of key value pairs. - # 1. key (string) - # 2. array: (value type, value) - for i in range(0, n_entries, 2): - key = self.parse_string(cell[i]) - m[key] = self.parse_scalar(cell[i + 1]) - - return m - - def parse_point(self, cell): - """ - Parse the cell to point. - """ - p = {} - # A point is received an array of the form: [latitude, longitude] - # It is returned as a map of the form: {"latitude": latitude, "longitude": longitude} # noqa - p["latitude"] = float(cell[0]) - p["longitude"] = float(cell[1]) - return p - - def parse_null(self, cell): - """ - Parse a null value. - """ - return None - - def parse_integer(self, cell): - """ - Parse the integer value from the cell. - """ - return int(cell) - - def parse_boolean(self, value): - """ - Parse the cell value as a boolean. - """ - value = value.decode() if isinstance(value, bytes) else value - try: - scalar = True if strtobool(value) else False - except ValueError: - sys.stderr.write("unknown boolean type\n") - scalar = None - return scalar - - def parse_double(self, cell): - """ - Parse the cell as a double. - """ - return float(cell) - - def parse_array(self, value): - """ - Parse an array of values. - """ - scalar = [self.parse_scalar(value[i]) for i in range(len(value))] - return scalar - - def parse_unknown(self, cell): - """ - Parse a cell of unknown type. - """ - sys.stderr.write("Unknown type\n") - return None - - def parse_scalar(self, cell): - """ - Parse a scalar value from a cell in the result set. 
- """ - scalar_type = int(cell[0]) - value = cell[1] - scalar = self.parse_scalar_types[scalar_type](value) - - return scalar - - def parse_profile(self, response): - self.result_set = [x[0 : x.index(",")].strip() for x in response] - - def is_empty(self): - return len(self.result_set) == 0 - - @staticmethod - def _get_value(prop, statistics): - for stat in statistics: - if prop in stat: - return float(stat.split(": ")[1].split(" ")[0]) - - return None - - def _get_stat(self, stat): - return self.statistics[stat] if stat in self.statistics else 0 - - @property - def labels_added(self): - """Returns the number of labels added in the query""" - return self._get_stat(LABELS_ADDED) - - @property - def labels_removed(self): - """Returns the number of labels removed in the query""" - return self._get_stat(LABELS_REMOVED) - - @property - def nodes_created(self): - """Returns the number of nodes created in the query""" - return self._get_stat(NODES_CREATED) - - @property - def nodes_deleted(self): - """Returns the number of nodes deleted in the query""" - return self._get_stat(NODES_DELETED) - - @property - def properties_set(self): - """Returns the number of properties set in the query""" - return self._get_stat(PROPERTIES_SET) - - @property - def properties_removed(self): - """Returns the number of properties removed in the query""" - return self._get_stat(PROPERTIES_REMOVED) - - @property - def relationships_created(self): - """Returns the number of relationships created in the query""" - return self._get_stat(RELATIONSHIPS_CREATED) - - @property - def relationships_deleted(self): - """Returns the number of relationships deleted in the query""" - return self._get_stat(RELATIONSHIPS_DELETED) - - @property - def indices_created(self): - """Returns the number of indices created in the query""" - return self._get_stat(INDICES_CREATED) - - @property - def indices_deleted(self): - """Returns the number of indices deleted in the query""" - return self._get_stat(INDICES_DELETED) - - @property - def cached_execution(self): - """Returns whether or not the query execution plan was cached""" - return self._get_stat(CACHED_EXECUTION) == 1 - - @property - def run_time_ms(self): - """Returns the server execution time of the query""" - return self._get_stat(INTERNAL_EXECUTION_TIME) - - @property - def parse_scalar_types(self): - return { - ResultSetScalarTypes.VALUE_NULL: self.parse_null, - ResultSetScalarTypes.VALUE_STRING: self.parse_string, - ResultSetScalarTypes.VALUE_INTEGER: self.parse_integer, - ResultSetScalarTypes.VALUE_BOOLEAN: self.parse_boolean, - ResultSetScalarTypes.VALUE_DOUBLE: self.parse_double, - ResultSetScalarTypes.VALUE_ARRAY: self.parse_array, - ResultSetScalarTypes.VALUE_NODE: self.parse_node, - ResultSetScalarTypes.VALUE_EDGE: self.parse_edge, - ResultSetScalarTypes.VALUE_PATH: self.parse_path, - ResultSetScalarTypes.VALUE_MAP: self.parse_map, - ResultSetScalarTypes.VALUE_POINT: self.parse_point, - ResultSetScalarTypes.VALUE_UNKNOWN: self.parse_unknown, - } - - @property - def parse_record_types(self): - return { - ResultSetColumnTypes.COLUMN_SCALAR: self.parse_scalar, - ResultSetColumnTypes.COLUMN_NODE: self.parse_node, - ResultSetColumnTypes.COLUMN_RELATION: self.parse_edge, - ResultSetColumnTypes.COLUMN_UNKNOWN: self.parse_unknown, - } - - -class AsyncQueryResult(QueryResult): - """ - Async version for the QueryResult class - a class that - represents a result of the query operation. 
- """ - - def __init__(self): - """ - To init the class you must call self.initialize() - """ - pass - - async def initialize(self, graph, response, profile=False): - """ - Initializes the class. - Args: - - graph: - The graph on which the query was executed. - response: - The response from the server. - profile: - A boolean indicating if the query command was "GRAPH.PROFILE" - """ - self.graph = graph - self.header = [] - self.result_set = [] - - # in case of an error an exception will be raised - self._check_for_errors(response) - - if len(response) == 1: - self.parse_statistics(response[0]) - elif profile: - self.parse_profile(response) - else: - # start by parsing statistics, matches the one we have - self.parse_statistics(response[-1]) # Last element. - await self.parse_results(response) - - return self - - async def parse_node(self, cell): - """ - Parses a node from the cell. - """ - # Node ID (integer), - # [label string offset (integer)], - # [[name, value type, value] X N] - - labels = None - if len(cell[1]) > 0: - labels = [] - for inner_label in cell[1]: - labels.append(await self.graph.get_label(inner_label)) - properties = await self.parse_entity_properties(cell[2]) - node_id = int(cell[0]) - return Node(node_id=node_id, label=labels, properties=properties) - - async def parse_scalar(self, cell): - """ - Parses a scalar value from the server response. - """ - scalar_type = int(cell[0]) - value = cell[1] - try: - scalar = await self.parse_scalar_types[scalar_type](value) - except TypeError: - # Not all of the functions are async - scalar = self.parse_scalar_types[scalar_type](value) - - return scalar - - async def parse_records(self, raw_result_set): - """ - Parses the result set and returns a list of records. - """ - records = [] - for row in raw_result_set[1]: - record = [ - await self.parse_record_types[self.header[idx][0]](cell) - for idx, cell in enumerate(row) - ] - records.append(record) - - return records - - async def parse_results(self, raw_result_set): - """ - Parse the query execution result returned from the server. - """ - self.header = self.parse_header(raw_result_set) - - # Empty header. - if len(self.header) == 0: - return - - self.result_set = await self.parse_records(raw_result_set) - - async def parse_entity_properties(self, props): - """ - Parse node / edge properties. - """ - # [[name, value type, value] X N] - properties = {} - for prop in props: - prop_name = await self.graph.get_property(prop[0]) - prop_value = await self.parse_scalar(prop[1:]) - properties[prop_name] = prop_value - - return properties - - async def parse_edge(self, cell): - """ - Parse the cell to an edge. - """ - # Edge ID (integer), - # reltype string offset (integer), - # src node ID offset (integer), - # dest node ID offset (integer), - # [[name, value, value type] X N] - - edge_id = int(cell[0]) - relation = await self.graph.get_relation(cell[1]) - src_node_id = int(cell[2]) - dest_node_id = int(cell[3]) - properties = await self.parse_entity_properties(cell[4]) - return Edge( - src_node_id, relation, dest_node_id, edge_id=edge_id, properties=properties - ) - - async def parse_path(self, cell): - """ - Parse the cell to a path. - """ - nodes = await self.parse_scalar(cell[0]) - edges = await self.parse_scalar(cell[1]) - return Path(nodes, edges) - - async def parse_map(self, cell): - """ - Parse the cell to a map. - """ - m = OrderedDict() - n_entries = len(cell) - - # A map is an array of key value pairs. - # 1. key (string) - # 2. 
array: (value type, value) - for i in range(0, n_entries, 2): - key = self.parse_string(cell[i]) - m[key] = await self.parse_scalar(cell[i + 1]) - - return m - - async def parse_array(self, value): - """ - Parse array value. - """ - scalar = [await self.parse_scalar(value[i]) for i in range(len(value))] - return scalar - - -def strtobool(val): - """ - Convert a string representation of truth to true (1) or false (0). - True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values - are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if - 'val' is anything else. - """ - val = val.lower() - if val in ("y", "yes", "t", "true", "on", "1"): - return True - elif val in ("n", "no", "f", "false", "off", "0"): - return False - else: - raise ValueError(f"invalid truth value {val!r}") diff --git a/redis/commands/helpers.py b/redis/commands/helpers.py index e11d34fb71..f6121b6c3b 100644 --- a/redis/commands/helpers.py +++ b/redis/commands/helpers.py @@ -88,26 +88,6 @@ def random_string(length=10): ) -def quote_string(v): - """ - RedisGraph strings must be quoted, - quote_string wraps given v with quotes incase - v is a string. - """ - - if isinstance(v, bytes): - v = v.decode() - elif not isinstance(v, str): - return v - if len(v) == 0: - return '""' - - v = v.replace("\\", "\\\\") - v = v.replace('"', '\\"') - - return f'"{v}"' - - def decode_dict_keys(obj): """Decode the keys of the given dictionary with utf-8.""" newobj = copy.copy(obj) @@ -118,33 +98,6 @@ def decode_dict_keys(obj): return newobj -def stringify_param_value(value): - """ - Turn a parameter value into a string suitable for the params header of - a Cypher command. - You may pass any value that would be accepted by `json.dumps()`. - - Ways in which output differs from that of `str()`: - * Strings are quoted. - * None --> "null". - * In dictionaries, keys are _not_ quoted. - - :param value: The parameter value to be turned into a string. - :return: string - """ - - if isinstance(value, str): - return quote_string(value) - elif value is None: - return "null" - elif isinstance(value, (list, tuple)): - return f"[{','.join(map(stringify_param_value, value))}]" - elif isinstance(value, dict): - return f"{{{','.join(f'{k}:{stringify_param_value(v)}' for k, v in value.items())}}}" # noqa - else: - return str(value) - - def get_protocol_version(client): if isinstance(client, redis.Redis) or isinstance(client, redis.asyncio.Redis): return client.connection_pool.connection_kwargs.get("protocol") diff --git a/redis/commands/redismodules.py b/redis/commands/redismodules.py index 7e2045a722..7ba40dd845 100644 --- a/redis/commands/redismodules.py +++ b/redis/commands/redismodules.py @@ -72,16 +72,6 @@ def tdigest(self): tdigest = TDigestBloom(client=self) return tdigest - def graph(self, index_name="idx"): - """Access the graph namespace, providing support for - redis graph data. - """ - - from .graph import Graph - - g = Graph(client=self, name=index_name) - return g - class AsyncRedisModuleCommands(RedisModuleCommands): def ft(self, index_name="idx"): @@ -91,13 +81,3 @@ def ft(self, index_name="idx"): s = AsyncSearch(client=self, index_name=index_name) return s - - def graph(self, index_name="idx"): - """Access the graph namespace, providing support for - redis graph data. 
- """ - - from .graph import AsyncGraph - - g = AsyncGraph(client=self, name=index_name) - return g diff --git a/tasks.py b/tasks.py index 2d1a073437..52decf08e7 100644 --- a/tasks.py +++ b/tasks.py @@ -58,11 +58,11 @@ def standalone_tests( if uvloop: run( - f"pytest {profile_arg} --protocol={protocol} {redis_mod_url} --cov=./ --cov-report=xml:coverage_resp{protocol}_uvloop.xml -m 'not onlycluster and not graph{extra_markers}' --uvloop --junit-xml=standalone-resp{protocol}-uvloop-results.xml" + f"pytest {profile_arg} --protocol={protocol} {redis_mod_url} --cov=./ --cov-report=xml:coverage_resp{protocol}_uvloop.xml -m 'not onlycluster{extra_markers}' --uvloop --junit-xml=standalone-resp{protocol}-uvloop-results.xml" ) else: run( - f"pytest {profile_arg} --protocol={protocol} {redis_mod_url} --cov=./ --cov-report=xml:coverage_resp{protocol}.xml -m 'not onlycluster and not graph{extra_markers}' --junit-xml=standalone-resp{protocol}-results.xml" + f"pytest {profile_arg} --protocol={protocol} {redis_mod_url} --cov=./ --cov-report=xml:coverage_resp{protocol}.xml -m 'not onlycluster{extra_markers}' --junit-xml=standalone-resp{protocol}-results.xml" ) @@ -74,11 +74,11 @@ def cluster_tests(c, uvloop=False, protocol=2, profile=False): cluster_tls_url = "rediss://localhost:27379/0" if uvloop: run( - f"pytest {profile_arg} --protocol={protocol} --cov=./ --cov-report=xml:coverage_cluster_resp{protocol}_uvloop.xml -m 'not onlynoncluster and not redismod and not graph' --redis-url={cluster_url} --redis-ssl-url={cluster_tls_url} --junit-xml=cluster-resp{protocol}-uvloop-results.xml --uvloop" + f"pytest {profile_arg} --protocol={protocol} --cov=./ --cov-report=xml:coverage_cluster_resp{protocol}_uvloop.xml -m 'not onlynoncluster and not redismod' --redis-url={cluster_url} --redis-ssl-url={cluster_tls_url} --junit-xml=cluster-resp{protocol}-uvloop-results.xml --uvloop" ) else: run( - f"pytest {profile_arg} --protocol={protocol} --cov=./ --cov-report=xml:coverage_cluster_resp{protocol}.xml -m 'not onlynoncluster and not redismod and not graph' --redis-url={cluster_url} --redis-ssl-url={cluster_tls_url} --junit-xml=cluster-resp{protocol}-results.xml" + f"pytest {profile_arg} --protocol={protocol} --cov=./ --cov-report=xml:coverage_cluster_resp{protocol}.xml -m 'not onlynoncluster and not redismod' --redis-url={cluster_url} --redis-ssl-url={cluster_tls_url} --junit-xml=cluster-resp{protocol}-results.xml" ) diff --git a/tests/test_asyncio/test_graph.py b/tests/test_asyncio/test_graph.py deleted file mode 100644 index 7b823265c3..0000000000 --- a/tests/test_asyncio/test_graph.py +++ /dev/null @@ -1,526 +0,0 @@ -import pytest -import pytest_asyncio -import redis.asyncio as redis -from redis.commands.graph import Edge, Node, Path -from redis.commands.graph.execution_plan import Operation -from redis.exceptions import ResponseError -from tests.conftest import skip_if_redis_enterprise, skip_if_resp_version - - -@pytest_asyncio.fixture() -async def decoded_r(create_redis, stack_url): - return await create_redis(decode_responses=True, url="redis://localhost:6480") - - -@pytest.mark.graph -@skip_if_resp_version(3) -async def test_bulk(decoded_r): - with pytest.raises(NotImplementedError): - await decoded_r.graph().bulk() - await decoded_r.graph().bulk(foo="bar!") - - -@pytest.mark.graph -@skip_if_resp_version(3) -async def test_graph_creation(decoded_r: redis.Redis): - graph = decoded_r.graph() - - john = Node( - label="person", - properties={ - "name": "John Doe", - "age": 33, - "gender": "male", - "status": "single", 
- }, - ) - graph.add_node(john) - japan = Node(label="country", properties={"name": "Japan"}) - - graph.add_node(japan) - edge = Edge(john, "visited", japan, properties={"purpose": "pleasure"}) - graph.add_edge(edge) - - await graph.commit() - - query = ( - 'MATCH (p:person)-[v:visited {purpose:"pleasure"}]->(c:country) RETURN p, v, c' - ) - - result = await graph.query(query) - - person = result.result_set[0][0] - visit = result.result_set[0][1] - country = result.result_set[0][2] - - assert person == john - assert visit.properties == edge.properties - assert country == japan - - query = """RETURN [1, 2.3, "4", true, false, null]""" - result = await graph.query(query) - assert [1, 2.3, "4", True, False, None] == result.result_set[0][0] - - # All done, remove graph. - await graph.delete() - - -@pytest.mark.graph -@skip_if_resp_version(3) -async def test_array_functions(decoded_r: redis.Redis): - graph = decoded_r.graph() - - query = """CREATE (p:person{name:'a',age:32, array:[0,1,2]})""" - await graph.query(query) - - query = """WITH [0,1,2] as x return x""" - result = await graph.query(query) - assert [0, 1, 2] == result.result_set[0][0] - - query = """MATCH(n) return collect(n)""" - result = await graph.query(query) - - a = Node( - node_id=0, - label="person", - properties={"name": "a", "age": 32, "array": [0, 1, 2]}, - ) - - assert [a] == result.result_set[0][0] - - -@pytest.mark.graph -@skip_if_resp_version(3) -async def test_path(decoded_r: redis.Redis): - node0 = Node(node_id=0, label="L1") - node1 = Node(node_id=1, label="L1") - edge01 = Edge(node0, "R1", node1, edge_id=0, properties={"value": 1}) - - graph = decoded_r.graph() - graph.add_node(node0) - graph.add_node(node1) - graph.add_edge(edge01) - await graph.flush() - - path01 = Path.new_empty_path().add_node(node0).add_edge(edge01).add_node(node1) - expected_results = [[path01]] - - query = "MATCH p=(:L1)-[:R1]->(:L1) RETURN p ORDER BY p" - result = await graph.query(query) - assert expected_results == result.result_set - - -@pytest.mark.graph -@skip_if_resp_version(3) -async def test_param(decoded_r: redis.Redis): - params = [1, 2.3, "str", True, False, None, [0, 1, 2]] - query = "RETURN $param" - for param in params: - result = await decoded_r.graph().query(query, {"param": param}) - expected_results = [[param]] - assert expected_results == result.result_set - - -@pytest.mark.graph -@skip_if_resp_version(3) -async def test_map(decoded_r: redis.Redis): - query = "RETURN {a:1, b:'str', c:NULL, d:[1,2,3], e:True, f:{x:1, y:2}}" - - actual = (await decoded_r.graph().query(query)).result_set[0][0] - expected = { - "a": 1, - "b": "str", - "c": None, - "d": [1, 2, 3], - "e": True, - "f": {"x": 1, "y": 2}, - } - - assert actual == expected - - -@pytest.mark.graph -@skip_if_resp_version(3) -async def test_point(decoded_r: redis.Redis): - query = "RETURN point({latitude: 32.070794860, longitude: 34.820751118})" - expected_lat = 32.070794860 - expected_lon = 34.820751118 - actual = (await decoded_r.graph().query(query)).result_set[0][0] - assert abs(actual["latitude"] - expected_lat) < 0.001 - assert abs(actual["longitude"] - expected_lon) < 0.001 - - query = "RETURN point({latitude: 32, longitude: 34.0})" - expected_lat = 32 - expected_lon = 34 - actual = (await decoded_r.graph().query(query)).result_set[0][0] - assert abs(actual["latitude"] - expected_lat) < 0.001 - assert abs(actual["longitude"] - expected_lon) < 0.001 - - -@pytest.mark.graph -@skip_if_resp_version(3) -async def test_index_response(decoded_r: redis.Redis): - 
result_set = await decoded_r.graph().query("CREATE INDEX ON :person(age)") - assert 1 == result_set.indices_created - - result_set = await decoded_r.graph().query("CREATE INDEX ON :person(age)") - assert 0 == result_set.indices_created - - result_set = await decoded_r.graph().query("DROP INDEX ON :person(age)") - assert 1 == result_set.indices_deleted - - with pytest.raises(ResponseError): - await decoded_r.graph().query("DROP INDEX ON :person(age)") - - -@pytest.mark.graph -@skip_if_resp_version(3) -async def test_stringify_query_result(decoded_r: redis.Redis): - graph = decoded_r.graph() - - john = Node( - alias="a", - label="person", - properties={ - "name": "John Doe", - "age": 33, - "gender": "male", - "status": "single", - }, - ) - graph.add_node(john) - - japan = Node(alias="b", label="country", properties={"name": "Japan"}) - graph.add_node(japan) - - edge = Edge(john, "visited", japan, properties={"purpose": "pleasure"}) - graph.add_edge(edge) - - assert ( - str(john) - == """(a:person{age:33,gender:"male",name:"John Doe",status:"single"})""" # noqa - ) - assert ( - str(edge) - == """(a:person{age:33,gender:"male",name:"John Doe",status:"single"})""" # noqa - + """-[:visited{purpose:"pleasure"}]->""" - + """(b:country{name:"Japan"})""" - ) - assert str(japan) == """(b:country{name:"Japan"})""" - - await graph.commit() - - query = """MATCH (p:person)-[v:visited {purpose:"pleasure"}]->(c:country) - RETURN p, v, c""" - - result = await graph.query(query) - person = result.result_set[0][0] - visit = result.result_set[0][1] - country = result.result_set[0][2] - - assert ( - str(person) - == """(:person{age:33,gender:"male",name:"John Doe",status:"single"})""" # noqa - ) - assert str(visit) == """()-[:visited{purpose:"pleasure"}]->()""" - assert str(country) == """(:country{name:"Japan"})""" - - await graph.delete() - - -@pytest.mark.graph -@skip_if_resp_version(3) -async def test_optional_match(decoded_r: redis.Redis): - # Build a graph of form (a)-[R]->(b) - node0 = Node(node_id=0, label="L1", properties={"value": "a"}) - node1 = Node(node_id=1, label="L1", properties={"value": "b"}) - - edge01 = Edge(node0, "R", node1, edge_id=0) - - graph = decoded_r.graph() - graph.add_node(node0) - graph.add_node(node1) - graph.add_edge(edge01) - await graph.flush() - - # Issue a query that collects all outgoing edges from both nodes - # (the second has none) - query = """MATCH (a) OPTIONAL MATCH (a)-[e]->(b) RETURN a, e, b ORDER BY a.value""" # noqa - expected_results = [[node0, edge01, node1], [node1, None, None]] - - result = await graph.query(query) - assert expected_results == result.result_set - - await graph.delete() - - -@pytest.mark.graph -@skip_if_resp_version(3) -async def test_cached_execution(decoded_r: redis.Redis): - await decoded_r.graph().query("CREATE ()") - - uncached_result = await decoded_r.graph().query( - "MATCH (n) RETURN n, $param", {"param": [0]} - ) - assert uncached_result.cached_execution is False - - # loop to make sure the query is cached on each thread on server - for x in range(0, 64): - cached_result = await decoded_r.graph().query( - "MATCH (n) RETURN n, $param", {"param": [0]} - ) - assert uncached_result.result_set == cached_result.result_set - - # should be cached on all threads by now - assert cached_result.cached_execution - - -@pytest.mark.graph -@skip_if_resp_version(3) -async def test_slowlog(decoded_r: redis.Redis): - create_query = """CREATE - (:Rider {name:'Valentino Rossi'})-[:rides]->(:Team {name:'Yamaha'}), - (:Rider {name:'Dani 
Pedrosa'})-[:rides]->(:Team {name:'Honda'}), - (:Rider {name:'Andrea Dovizioso'})-[:rides]->(:Team {name:'Ducati'})""" - await decoded_r.graph().query(create_query) - - results = await decoded_r.graph().slowlog() - assert results[0][1] == "GRAPH.QUERY" - assert results[0][2] == create_query - - -@pytest.mark.xfail(strict=False) -@pytest.mark.graph -@skip_if_resp_version(3) -async def test_query_timeout(decoded_r: redis.Redis): - # Build a sample graph with 1000 nodes. - await decoded_r.graph().query("UNWIND range(0,1000) as val CREATE ({v: val})") - # Issue a long-running query with a 1-millisecond timeout. - with pytest.raises(ResponseError): - await decoded_r.graph().query("MATCH (a), (b), (c), (d) RETURN *", timeout=1) - assert False is False - - with pytest.raises(Exception): - await decoded_r.graph().query("RETURN 1", timeout="str") - assert False is False - - -@pytest.mark.graph -@skip_if_resp_version(3) -async def test_read_only_query(decoded_r: redis.Redis): - with pytest.raises(Exception): - # Issue a write query, specifying read-only true, - # this call should fail. - await decoded_r.graph().query("CREATE (p:person {name:'a'})", read_only=True) - assert False is False - - -@pytest.mark.graph -@skip_if_resp_version(3) -async def test_profile(decoded_r: redis.Redis): - q = """UNWIND range(1, 3) AS x CREATE (p:Person {v:x})""" - profile = (await decoded_r.graph().profile(q)).result_set - assert "Create | Records produced: 3" in profile - assert "Unwind | Records produced: 3" in profile - - q = "MATCH (p:Person) WHERE p.v > 1 RETURN p" - profile = (await decoded_r.graph().profile(q)).result_set - assert "Results | Records produced: 2" in profile - assert "Project | Records produced: 2" in profile - assert "Filter | Records produced: 2" in profile - assert "Node By Label Scan | (p:Person) | Records produced: 3" in profile - - -@skip_if_redis_enterprise() -@pytest.mark.graph -@skip_if_resp_version(3) -async def test_config(decoded_r: redis.Redis): - config_name = "RESULTSET_SIZE" - config_value = 3 - - # Set configuration - response = await decoded_r.graph().config(config_name, config_value, set=True) - assert response == "OK" - - # Make sure config been updated. - response = await decoded_r.graph().config(config_name, set=False) - expected_response = [config_name, config_value] - assert response == expected_response - - config_name = "QUERY_MEM_CAPACITY" - config_value = 1 << 20 # 1MB - - # Set configuration - response = await decoded_r.graph().config(config_name, config_value, set=True) - assert response == "OK" - - # Make sure config been updated. 
- response = await decoded_r.graph().config(config_name, set=False) - expected_response = [config_name, config_value] - assert response == expected_response - - # reset to default - await decoded_r.graph().config("QUERY_MEM_CAPACITY", 0, set=True) - await decoded_r.graph().config("RESULTSET_SIZE", -100, set=True) - - -@pytest.mark.onlynoncluster -@pytest.mark.graph -@skip_if_resp_version(3) -async def test_list_keys(decoded_r: redis.Redis): - result = await decoded_r.graph().list_keys() - assert result == [] - - await decoded_r.graph("G").query("CREATE (n)") - result = await decoded_r.graph().list_keys() - assert result == ["G"] - - await decoded_r.graph("X").query("CREATE (m)") - result = await decoded_r.graph().list_keys() - assert result == ["G", "X"] - - await decoded_r.delete("G") - await decoded_r.rename("X", "Z") - result = await decoded_r.graph().list_keys() - assert result == ["Z"] - - await decoded_r.delete("Z") - result = await decoded_r.graph().list_keys() - assert result == [] - - -@pytest.mark.graph -@skip_if_resp_version(3) -async def test_multi_label(decoded_r: redis.Redis): - redis_graph = decoded_r.graph("g") - - node = Node(label=["l", "ll"]) - redis_graph.add_node(node) - await redis_graph.commit() - - query = "MATCH (n) RETURN n" - result = await redis_graph.query(query) - result_node = result.result_set[0][0] - assert result_node == node - - try: - Node(label=1) - assert False - except AssertionError: - assert True - - try: - Node(label=["l", 1]) - assert False - except AssertionError: - assert True - - -@pytest.mark.graph -@skip_if_resp_version(3) -async def test_execution_plan(decoded_r: redis.Redis): - redis_graph = decoded_r.graph("execution_plan") - create_query = """CREATE - (:Rider {name:'Valentino Rossi'})-[:rides]->(:Team {name:'Yamaha'}), - (:Rider {name:'Dani Pedrosa'})-[:rides]->(:Team {name:'Honda'}), - (:Rider {name:'Andrea Dovizioso'})-[:rides]->(:Team {name:'Ducati'})""" - await redis_graph.query(create_query) - - result = await redis_graph.execution_plan( - "MATCH (r:Rider)-[:rides]->(t:Team) WHERE t.name = $name RETURN r.name, t.name, $params", # noqa - {"name": "Yehuda"}, - ) - expected = "Results\n Project\n Conditional Traverse | (t)->(r:Rider)\n Filter\n Node By Label Scan | (t:Team)" # noqa - assert result == expected - - await redis_graph.delete() - - -@pytest.mark.graph -@skip_if_resp_version(3) -async def test_explain(decoded_r: redis.Redis): - redis_graph = decoded_r.graph("execution_plan") - # graph creation / population - create_query = """CREATE -(:Rider {name:'Valentino Rossi'})-[:rides]->(:Team {name:'Yamaha'}), -(:Rider {name:'Dani Pedrosa'})-[:rides]->(:Team {name:'Honda'}), -(:Rider {name:'Andrea Dovizioso'})-[:rides]->(:Team {name:'Ducati'})""" - await redis_graph.query(create_query) - - result = await redis_graph.explain( - """MATCH (r:Rider)-[:rides]->(t:Team) -WHERE t.name = $name -RETURN r.name, t.name -UNION -MATCH (r:Rider)-[:rides]->(t:Team) -WHERE t.name = $name -RETURN r.name, t.name""", - {"name": "Yamaha"}, - ) - expected = """\ -Results -Distinct - Join - Project - Conditional Traverse | (t)->(r:Rider) - Filter - Node By Label Scan | (t:Team) - Project - Conditional Traverse | (t)->(r:Rider) - Filter - Node By Label Scan | (t:Team)""" - assert str(result).replace(" ", "").replace("\n", "") == expected.replace( - " ", "" - ).replace("\n", "") - - expected = Operation("Results").append_child( - Operation("Distinct").append_child( - Operation("Join") - .append_child( - Operation("Project").append_child( - 
Operation("Conditional Traverse", "(t)->(r:Rider)").append_child( - Operation("Filter").append_child( - Operation("Node By Label Scan", "(t:Team)") - ) - ) - ) - ) - .append_child( - Operation("Project").append_child( - Operation("Conditional Traverse", "(t)->(r:Rider)").append_child( - Operation("Filter").append_child( - Operation("Node By Label Scan", "(t:Team)") - ) - ) - ) - ) - ) - ) - - assert result.structured_plan == expected - - result = await redis_graph.explain( - """MATCH (r:Rider), (t:Team) - RETURN r.name, t.name""" - ) - expected = """\ -Results -Project - Cartesian Product - Node By Label Scan | (r:Rider) - Node By Label Scan | (t:Team)""" - assert str(result).replace(" ", "").replace("\n", "") == expected.replace( - " ", "" - ).replace("\n", "") - - expected = Operation("Results").append_child( - Operation("Project").append_child( - Operation("Cartesian Product") - .append_child(Operation("Node By Label Scan")) - .append_child(Operation("Node By Label Scan")) - ) - ) - - assert result.structured_plan == expected - - await redis_graph.delete() diff --git a/tests/test_graph.py b/tests/test_graph.py deleted file mode 100644 index fd08385667..0000000000 --- a/tests/test_graph.py +++ /dev/null @@ -1,656 +0,0 @@ -from unittest.mock import patch - -import pytest -from redis import Redis -from redis.commands.graph import Edge, Node, Path -from redis.commands.graph.execution_plan import Operation -from redis.commands.graph.query_result import ( - CACHED_EXECUTION, - INDICES_CREATED, - INDICES_DELETED, - INTERNAL_EXECUTION_TIME, - LABELS_ADDED, - LABELS_REMOVED, - NODES_CREATED, - NODES_DELETED, - PROPERTIES_REMOVED, - PROPERTIES_SET, - RELATIONSHIPS_CREATED, - RELATIONSHIPS_DELETED, - QueryResult, -) -from redis.exceptions import ResponseError -from tests.conftest import _get_client, skip_if_redis_enterprise, skip_if_resp_version - - -@pytest.fixture -def client(request, stack_url): - r = _get_client( - Redis, request, decode_responses=True, from_url="redis://localhost:6480" - ) - r.flushdb() - return r - - -@pytest.mark.graph -@skip_if_resp_version(3) -def test_bulk(client): - with pytest.raises(NotImplementedError): - client.graph().bulk() - client.graph().bulk(foo="bar!") - - -@pytest.mark.graph -def test_graph_creation_throws_deprecation_warning(client): - """Verify that a DeprecationWarning is raised when creating a Graph instance.""" - - match = "RedisGraph support is deprecated as of Redis Stack 7.2" - with pytest.warns(DeprecationWarning, match=match): - client.graph() - - -@pytest.mark.graph -@skip_if_resp_version(3) -def test_graph_creation(client): - graph = client.graph() - - john = Node( - label="person", - properties={ - "name": "John Doe", - "age": 33, - "gender": "male", - "status": "single", - }, - ) - graph.add_node(john) - japan = Node(label="country", properties={"name": "Japan"}) - - graph.add_node(japan) - edge = Edge(john, "visited", japan, properties={"purpose": "pleasure"}) - graph.add_edge(edge) - - graph.commit() - - query = ( - 'MATCH (p:person)-[v:visited {purpose:"pleasure"}]->(c:country) RETURN p, v, c' - ) - - result = graph.query(query) - - person = result.result_set[0][0] - visit = result.result_set[0][1] - country = result.result_set[0][2] - - assert person == john - assert visit.properties == edge.properties - assert country == japan - - query = """RETURN [1, 2.3, "4", true, false, null]""" - result = graph.query(query) - assert [1, 2.3, "4", True, False, None] == result.result_set[0][0] - - # All done, remove graph. 
- graph.delete() - - -@pytest.mark.graph -@skip_if_resp_version(3) -def test_array_functions(client): - query = """CREATE (p:person{name:'a',age:32, array:[0,1,2]})""" - client.graph().query(query) - - query = """WITH [0,1,2] as x return x""" - result = client.graph().query(query) - assert [0, 1, 2] == result.result_set[0][0] - - query = """MATCH(n) return collect(n)""" - result = client.graph().query(query) - - a = Node( - node_id=0, - label="person", - properties={"name": "a", "age": 32, "array": [0, 1, 2]}, - ) - - assert [a] == result.result_set[0][0] - - -@pytest.mark.graph -@skip_if_resp_version(3) -def test_path(client): - node0 = Node(node_id=0, label="L1") - node1 = Node(node_id=1, label="L1") - edge01 = Edge(node0, "R1", node1, edge_id=0, properties={"value": 1}) - - graph = client.graph() - graph.add_node(node0) - graph.add_node(node1) - graph.add_edge(edge01) - graph.flush() - - path01 = Path.new_empty_path().add_node(node0).add_edge(edge01).add_node(node1) - expected_results = [[path01]] - - query = "MATCH p=(:L1)-[:R1]->(:L1) RETURN p ORDER BY p" - result = graph.query(query) - assert expected_results == result.result_set - - -@pytest.mark.graph -@skip_if_resp_version(3) -def test_param(client): - params = [1, 2.3, "str", True, False, None, [0, 1, 2], r"\" RETURN 1337 //"] - query = "RETURN $param" - for param in params: - result = client.graph().query(query, {"param": param}) - expected_results = [[param]] - assert expected_results == result.result_set - - -@pytest.mark.graph -@skip_if_resp_version(3) -def test_map(client): - query = "RETURN {a:1, b:'str', c:NULL, d:[1,2,3], e:True, f:{x:1, y:2}}" - - actual = client.graph().query(query).result_set[0][0] - expected = { - "a": 1, - "b": "str", - "c": None, - "d": [1, 2, 3], - "e": True, - "f": {"x": 1, "y": 2}, - } - - assert actual == expected - - -@pytest.mark.graph -@skip_if_resp_version(3) -def test_point(client): - query = "RETURN point({latitude: 32.070794860, longitude: 34.820751118})" - expected_lat = 32.070794860 - expected_lon = 34.820751118 - actual = client.graph().query(query).result_set[0][0] - assert abs(actual["latitude"] - expected_lat) < 0.001 - assert abs(actual["longitude"] - expected_lon) < 0.001 - - query = "RETURN point({latitude: 32, longitude: 34.0})" - expected_lat = 32 - expected_lon = 34 - actual = client.graph().query(query).result_set[0][0] - assert abs(actual["latitude"] - expected_lat) < 0.001 - assert abs(actual["longitude"] - expected_lon) < 0.001 - - -@pytest.mark.graph -@skip_if_resp_version(3) -def test_index_response(client): - result_set = client.graph().query("CREATE INDEX ON :person(age)") - assert 1 == result_set.indices_created - - result_set = client.graph().query("CREATE INDEX ON :person(age)") - assert 0 == result_set.indices_created - - result_set = client.graph().query("DROP INDEX ON :person(age)") - assert 1 == result_set.indices_deleted - - with pytest.raises(ResponseError): - client.graph().query("DROP INDEX ON :person(age)") - - -@pytest.mark.graph -@skip_if_resp_version(3) -def test_stringify_query_result(client): - graph = client.graph() - - john = Node( - alias="a", - label="person", - properties={ - "name": "John Doe", - "age": 33, - "gender": "male", - "status": "single", - }, - ) - graph.add_node(john) - - japan = Node(alias="b", label="country", properties={"name": "Japan"}) - graph.add_node(japan) - - edge = Edge(john, "visited", japan, properties={"purpose": "pleasure"}) - graph.add_edge(edge) - - assert ( - str(john) - == 
"""(a:person{age:33,gender:"male",name:"John Doe",status:"single"})""" # noqa - ) - assert ( - str(edge) - == """(a:person{age:33,gender:"male",name:"John Doe",status:"single"})""" # noqa - + """-[:visited{purpose:"pleasure"}]->""" - + """(b:country{name:"Japan"})""" - ) - assert str(japan) == """(b:country{name:"Japan"})""" - - graph.commit() - - query = """MATCH (p:person)-[v:visited {purpose:"pleasure"}]->(c:country) - RETURN p, v, c""" - - result = client.graph().query(query) - person = result.result_set[0][0] - visit = result.result_set[0][1] - country = result.result_set[0][2] - - assert ( - str(person) - == """(:person{age:33,gender:"male",name:"John Doe",status:"single"})""" # noqa - ) - assert str(visit) == """()-[:visited{purpose:"pleasure"}]->()""" - assert str(country) == """(:country{name:"Japan"})""" - - graph.delete() - - -@pytest.mark.graph -@skip_if_resp_version(3) -def test_optional_match(client): - # Build a graph of form (a)-[R]->(b) - node0 = Node(node_id=0, label="L1", properties={"value": "a"}) - node1 = Node(node_id=1, label="L1", properties={"value": "b"}) - - edge01 = Edge(node0, "R", node1, edge_id=0) - - graph = client.graph() - graph.add_node(node0) - graph.add_node(node1) - graph.add_edge(edge01) - graph.flush() - - # Issue a query that collects all outgoing edges from both nodes - # (the second has none) - query = """MATCH (a) OPTIONAL MATCH (a)-[e]->(b) RETURN a, e, b ORDER BY a.value""" # noqa - expected_results = [[node0, edge01, node1], [node1, None, None]] - - result = client.graph().query(query) - assert expected_results == result.result_set - - graph.delete() - - -@pytest.mark.graph -@skip_if_resp_version(3) -def test_cached_execution(client): - client.graph().query("CREATE ()") - - uncached_result = client.graph().query("MATCH (n) RETURN n, $param", {"param": [0]}) - assert uncached_result.cached_execution is False - - # loop to make sure the query is cached on each thread on server - for x in range(0, 64): - cached_result = client.graph().query( - "MATCH (n) RETURN n, $param", {"param": [0]} - ) - assert uncached_result.result_set == cached_result.result_set - - # should be cached on all threads by now - assert cached_result.cached_execution - - -@pytest.mark.graph -@skip_if_resp_version(3) -def test_slowlog(client): - create_query = """CREATE (:Rider - {name:'Valentino Rossi'})-[:rides]->(:Team {name:'Yamaha'}), - (:Rider {name:'Dani Pedrosa'})-[:rides]->(:Team {name:'Honda'}), - (:Rider {name:'Andrea Dovizioso'})-[:rides]->(:Team {name:'Ducati'})""" - client.graph().query(create_query) - - results = client.graph().slowlog() - assert results[0][1] == "GRAPH.QUERY" - assert results[0][2] == create_query - - -@pytest.mark.graph -@skip_if_resp_version(3) -@pytest.mark.xfail(strict=False) -def test_query_timeout(client): - # Build a sample graph with 1000 nodes. - client.graph().query("UNWIND range(0,1000) as val CREATE ({v: val})") - # Issue a long-running query with a 1-millisecond timeout. - with pytest.raises(ResponseError): - client.graph().query("MATCH (a), (b), (c), (d) RETURN *", timeout=1) - assert False is False - - with pytest.raises(Exception): - client.graph().query("RETURN 1", timeout="str") - assert False is False - - -@pytest.mark.graph -@skip_if_resp_version(3) -def test_read_only_query(client): - with pytest.raises(Exception): - # Issue a write query, specifying read-only true, - # this call should fail. 
- client.graph().query("CREATE (p:person {name:'a'})", read_only=True) - assert False is False - - -@pytest.mark.graph -@skip_if_resp_version(3) -def test_profile(client): - q = """UNWIND range(1, 3) AS x CREATE (p:Person {v:x})""" - profile = client.graph().profile(q).result_set - assert "Create | Records produced: 3" in profile - assert "Unwind | Records produced: 3" in profile - - q = "MATCH (p:Person) WHERE p.v > 1 RETURN p" - profile = client.graph().profile(q).result_set - assert "Results | Records produced: 2" in profile - assert "Project | Records produced: 2" in profile - assert "Filter | Records produced: 2" in profile - assert "Node By Label Scan | (p:Person) | Records produced: 3" in profile - - -@pytest.mark.graph -@skip_if_resp_version(3) -@skip_if_redis_enterprise() -def test_config(client): - config_name = "RESULTSET_SIZE" - config_value = 3 - - # Set configuration - response = client.graph().config(config_name, config_value, set=True) - assert response == "OK" - - # Make sure config been updated. - response = client.graph().config(config_name, set=False) - expected_response = [config_name, config_value] - assert response == expected_response - - config_name = "QUERY_MEM_CAPACITY" - config_value = 1 << 20 # 1MB - - # Set configuration - response = client.graph().config(config_name, config_value, set=True) - assert response == "OK" - - # Make sure config been updated. - response = client.graph().config(config_name, set=False) - expected_response = [config_name, config_value] - assert response == expected_response - - # reset to default - client.graph().config("QUERY_MEM_CAPACITY", 0, set=True) - client.graph().config("RESULTSET_SIZE", -100, set=True) - - -@pytest.mark.onlynoncluster -@pytest.mark.graph -@skip_if_resp_version(3) -def test_list_keys(client): - result = client.graph().list_keys() - assert result == [] - - client.graph("G").query("CREATE (n)") - result = client.graph().list_keys() - assert result == ["G"] - - client.graph("X").query("CREATE (m)") - result = client.graph().list_keys() - assert result == ["G", "X"] - - client.delete("G") - client.rename("X", "Z") - result = client.graph().list_keys() - assert result == ["Z"] - - client.delete("Z") - result = client.graph().list_keys() - assert result == [] - - -@pytest.mark.graph -@skip_if_resp_version(3) -def test_multi_label(client): - redis_graph = client.graph("g") - - node = Node(label=["l", "ll"]) - redis_graph.add_node(node) - redis_graph.commit() - - query = "MATCH (n) RETURN n" - result = redis_graph.query(query) - result_node = result.result_set[0][0] - assert result_node == node - - try: - Node(label=1) - assert False - except AssertionError: - assert True - - try: - Node(label=["l", 1]) - assert False - except AssertionError: - assert True - - -@pytest.mark.graph -@skip_if_resp_version(3) -def test_cache_sync(client): - pass - return - # This test verifies that client internal graph schema cache stays - # in sync with the graph schema - # - # Client B will try to get Client A out of sync by: - # 1. deleting the graph - # 2. reconstructing the graph in a different order, this will casuse - # a difference in the current mapping between string IDs and the - # mapping Client A is aware of - # - # Client A should pick up on the changes by comparing graph versions - # and resyncing its cache. - - A = client.graph("cache-sync") - B = client.graph("cache-sync") - - # Build order: - # 1. introduce label 'L' and 'K' - # 2. introduce attribute 'x' and 'q' - # 3. 
introduce relationship-type 'R' and 'S' - - A.query("CREATE (:L)") - B.query("CREATE (:K)") - A.query("MATCH (n) SET n.x = 1") - B.query("MATCH (n) SET n.q = 1") - A.query("MATCH (n) CREATE (n)-[:R]->()") - B.query("MATCH (n) CREATE (n)-[:S]->()") - - # Cause client A to populate its cache - A.query("MATCH (n)-[e]->() RETURN n, e") - - assert len(A._labels) == 2 - assert len(A._properties) == 2 - assert len(A._relationship_types) == 2 - assert A._labels[0] == "L" - assert A._labels[1] == "K" - assert A._properties[0] == "x" - assert A._properties[1] == "q" - assert A._relationship_types[0] == "R" - assert A._relationship_types[1] == "S" - - # Have client B reconstruct the graph in a different order. - B.delete() - - # Build order: - # 1. introduce relationship-type 'R' - # 2. introduce label 'L' - # 3. introduce attribute 'x' - B.query("CREATE ()-[:S]->()") - B.query("CREATE ()-[:R]->()") - B.query("CREATE (:K)") - B.query("CREATE (:L)") - B.query("MATCH (n) SET n.q = 1") - B.query("MATCH (n) SET n.x = 1") - - # A's internal cached mapping is now out of sync - # issue a query and make sure A's cache is synced. - A.query("MATCH (n)-[e]->() RETURN n, e") - - assert len(A._labels) == 2 - assert len(A._properties) == 2 - assert len(A._relationship_types) == 2 - assert A._labels[0] == "K" - assert A._labels[1] == "L" - assert A._properties[0] == "q" - assert A._properties[1] == "x" - assert A._relationship_types[0] == "S" - assert A._relationship_types[1] == "R" - - -@pytest.mark.graph -@skip_if_resp_version(3) -def test_execution_plan(client): - redis_graph = client.graph("execution_plan") - create_query = """CREATE - (:Rider {name:'Valentino Rossi'})-[:rides]->(:Team {name:'Yamaha'}), - (:Rider {name:'Dani Pedrosa'})-[:rides]->(:Team {name:'Honda'}), - (:Rider {name:'Andrea Dovizioso'})-[:rides]->(:Team {name:'Ducati'})""" - redis_graph.query(create_query) - - result = redis_graph.execution_plan( - "MATCH (r:Rider)-[:rides]->(t:Team) WHERE t.name = $name RETURN r.name, t.name, $params", # noqa - {"name": "Yehuda"}, - ) - expected = "Results\n Project\n Conditional Traverse | (t)->(r:Rider)\n Filter\n Node By Label Scan | (t:Team)" # noqa - assert result == expected - - redis_graph.delete() - - -@pytest.mark.graph -@skip_if_resp_version(3) -def test_explain(client): - redis_graph = client.graph("execution_plan") - # graph creation / population - create_query = """CREATE -(:Rider {name:'Valentino Rossi'})-[:rides]->(:Team {name:'Yamaha'}), -(:Rider {name:'Dani Pedrosa'})-[:rides]->(:Team {name:'Honda'}), -(:Rider {name:'Andrea Dovizioso'})-[:rides]->(:Team {name:'Ducati'})""" - redis_graph.query(create_query) - - result = redis_graph.explain( - """MATCH (r:Rider)-[:rides]->(t:Team) -WHERE t.name = $name -RETURN r.name, t.name -UNION -MATCH (r:Rider)-[:rides]->(t:Team) -WHERE t.name = $name -RETURN r.name, t.name""", - {"name": "Yamaha"}, - ) - expected = """\ -Results -Distinct - Join - Project - Conditional Traverse | (t)->(r:Rider) - Filter - Node By Label Scan | (t:Team) - Project - Conditional Traverse | (t)->(r:Rider) - Filter - Node By Label Scan | (t:Team)""" - assert str(result).replace(" ", "").replace("\n", "") == expected.replace( - " ", "" - ).replace("\n", "") - - expected = Operation("Results").append_child( - Operation("Distinct").append_child( - Operation("Join") - .append_child( - Operation("Project").append_child( - Operation("Conditional Traverse", "(t)->(r:Rider)").append_child( - Operation("Filter").append_child( - Operation("Node By Label Scan", "(t:Team)") - ) - ) - ) - ) 
- .append_child( - Operation("Project").append_child( - Operation("Conditional Traverse", "(t)->(r:Rider)").append_child( - Operation("Filter").append_child( - Operation("Node By Label Scan", "(t:Team)") - ) - ) - ) - ) - ) - ) - - assert result.structured_plan == expected - - result = redis_graph.explain( - """MATCH (r:Rider), (t:Team) - RETURN r.name, t.name""" - ) - expected = """\ -Results -Project - Cartesian Product - Node By Label Scan | (r:Rider) - Node By Label Scan | (t:Team)""" - assert str(result).replace(" ", "").replace("\n", "") == expected.replace( - " ", "" - ).replace("\n", "") - - expected = Operation("Results").append_child( - Operation("Project").append_child( - Operation("Cartesian Product") - .append_child(Operation("Node By Label Scan")) - .append_child(Operation("Node By Label Scan")) - ) - ) - - assert result.structured_plan == expected - - redis_graph.delete() - - -@pytest.mark.graph -@skip_if_resp_version(3) -def test_resultset_statistics(client): - with patch.object(target=QueryResult, attribute="_get_stat") as mock_get_stats: - result = client.graph().query("RETURN 1") - result.labels_added - mock_get_stats.assert_called_with(LABELS_ADDED) - result.labels_removed - mock_get_stats.assert_called_with(LABELS_REMOVED) - result.nodes_created - mock_get_stats.assert_called_with(NODES_CREATED) - result.nodes_deleted - mock_get_stats.assert_called_with(NODES_DELETED) - result.properties_set - mock_get_stats.assert_called_with(PROPERTIES_SET) - result.properties_removed - mock_get_stats.assert_called_with(PROPERTIES_REMOVED) - result.relationships_created - mock_get_stats.assert_called_with(RELATIONSHIPS_CREATED) - result.relationships_deleted - mock_get_stats.assert_called_with(RELATIONSHIPS_DELETED) - result.indices_created - mock_get_stats.assert_called_with(INDICES_CREATED) - result.indices_deleted - mock_get_stats.assert_called_with(INDICES_DELETED) - result.cached_execution - mock_get_stats.assert_called_with(CACHED_EXECUTION) - result.run_time_ms - mock_get_stats.assert_called_with(INTERNAL_EXECUTION_TIME) diff --git a/tests/test_graph_utils/__init__.py b/tests/test_graph_utils/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/test_graph_utils/test_edge.py b/tests/test_graph_utils/test_edge.py deleted file mode 100644 index 09e6fa08ed..0000000000 --- a/tests/test_graph_utils/test_edge.py +++ /dev/null @@ -1,75 +0,0 @@ -import pytest -from redis.commands.graph import edge, node - - -@pytest.mark.graph -def test_init(): - with pytest.raises(AssertionError): - edge.Edge(None, None, None) - edge.Edge(node.Node(), None, None) - edge.Edge(None, None, node.Node()) - - assert isinstance( - edge.Edge(node.Node(node_id=1), None, node.Node(node_id=2)), edge.Edge - ) - - -@pytest.mark.graph -def test_to_string(): - props_result = edge.Edge( - node.Node(), None, node.Node(), properties={"a": "a", "b": 10} - ).to_string() - assert props_result == '{a:"a",b:10}' - - no_props_result = edge.Edge( - node.Node(), None, node.Node(), properties={} - ).to_string() - assert no_props_result == "" - - -@pytest.mark.graph -def test_stringify(): - john = node.Node( - alias="a", - label="person", - properties={"name": "John Doe", "age": 33, "someArray": [1, 2, 3]}, - ) - japan = node.Node(alias="b", label="country", properties={"name": "Japan"}) - edge_with_relation = edge.Edge( - john, "visited", japan, properties={"purpose": "pleasure"} - ) - assert ( - '(a:person{age:33,name:"John Doe",someArray:[1, 2, 3]})' - '-[:visited{purpose:"pleasure"}]->' - 
'(b:country{name:"Japan"})' == str(edge_with_relation) - ) - - edge_no_relation_no_props = edge.Edge(japan, "", john) - assert ( - '(b:country{name:"Japan"})' - "-[]->" - '(a:person{age:33,name:"John Doe",someArray:[1, 2, 3]})' - == str(edge_no_relation_no_props) - ) - - edge_only_props = edge.Edge(john, "", japan, properties={"a": "b", "c": 3}) - assert ( - '(a:person{age:33,name:"John Doe",someArray:[1, 2, 3]})' - '-[{a:"b",c:3}]->' - '(b:country{name:"Japan"})' == str(edge_only_props) - ) - - -@pytest.mark.graph -def test_comparison(): - node1 = node.Node(node_id=1) - node2 = node.Node(node_id=2) - node3 = node.Node(node_id=3) - - edge1 = edge.Edge(node1, None, node2) - assert edge1 == edge.Edge(node1, None, node2) - assert edge1 != edge.Edge(node1, "bla", node2) - assert edge1 != edge.Edge(node1, None, node3) - assert edge1 != edge.Edge(node3, None, node2) - assert edge1 != edge.Edge(node2, None, node1) - assert edge1 != edge.Edge(node1, None, node2, properties={"a": 10}) diff --git a/tests/test_graph_utils/test_node.py b/tests/test_graph_utils/test_node.py deleted file mode 100644 index e9b8a54f43..0000000000 --- a/tests/test_graph_utils/test_node.py +++ /dev/null @@ -1,51 +0,0 @@ -import pytest -from redis.commands.graph import node - - -@pytest.fixture -def fixture(): - no_args = node.Node() - no_props = node.Node(node_id=1, alias="alias", label="l") - props_only = node.Node(properties={"a": "a", "b": 10}) - no_label = node.Node(node_id=1, alias="alias", properties={"a": "a"}) - multi_label = node.Node(node_id=1, alias="alias", label=["l", "ll"]) - return no_args, no_props, props_only, no_label, multi_label - - -@pytest.mark.graph -def test_to_string(fixture): - no_args, no_props, props_only, no_label, multi_label = fixture - assert no_args.to_string() == "" - assert no_props.to_string() == "" - assert props_only.to_string() == '{a:"a",b:10}' - assert no_label.to_string() == '{a:"a"}' - assert multi_label.to_string() == "" - - -@pytest.mark.graph -def test_stringify(fixture): - no_args, no_props, props_only, no_label, multi_label = fixture - assert str(no_args) == "()" - assert str(no_props) == "(alias:l)" - assert str(props_only) == '({a:"a",b:10})' - assert str(no_label) == '(alias{a:"a"})' - assert str(multi_label) == "(alias:l:ll)" - - -@pytest.mark.graph -def test_comparison(fixture): - no_args, no_props, props_only, no_label, multi_label = fixture - - assert node.Node() == node.Node() - assert node.Node(node_id=1) == node.Node(node_id=1) - assert node.Node(node_id=1) != node.Node(node_id=2) - assert node.Node(node_id=1, alias="a") == node.Node(node_id=1, alias="b") - assert node.Node(node_id=1, alias="a") == node.Node(node_id=1, alias="a") - assert node.Node(node_id=1, label="a") == node.Node(node_id=1, label="a") - assert node.Node(node_id=1, label="a") != node.Node(node_id=1, label="b") - assert node.Node(node_id=1, alias="a", label="l") == node.Node( - node_id=1, alias="a", label="l" - ) - assert node.Node(alias="a", label="l") != node.Node(alias="a", label="l1") - assert node.Node(properties={"a": 10}) == node.Node(properties={"a": 10}) - assert node.Node() != node.Node(properties={"a": 10}) diff --git a/tests/test_graph_utils/test_path.py b/tests/test_graph_utils/test_path.py deleted file mode 100644 index 33ca041cfa..0000000000 --- a/tests/test_graph_utils/test_path.py +++ /dev/null @@ -1,90 +0,0 @@ -import pytest -from redis.commands.graph import edge, node, path - - -@pytest.mark.graph -def test_init(): - with pytest.raises(TypeError): - path.Path(None, None) - 
path.Path([], None) - path.Path(None, []) - - assert isinstance(path.Path([], []), path.Path) - - -@pytest.mark.graph -def test_new_empty_path(): - new_empty_path = path.Path.new_empty_path() - assert isinstance(new_empty_path, path.Path) - assert new_empty_path._nodes == [] - assert new_empty_path._edges == [] - - -@pytest.mark.graph -def test_wrong_flows(): - node_1 = node.Node(node_id=1) - node_2 = node.Node(node_id=2) - node_3 = node.Node(node_id=3) - - edge_1 = edge.Edge(node_1, None, node_2) - edge_2 = edge.Edge(node_1, None, node_3) - - p = path.Path.new_empty_path() - with pytest.raises(AssertionError): - p.add_edge(edge_1) - - p.add_node(node_1) - with pytest.raises(AssertionError): - p.add_node(node_2) - - p.add_edge(edge_1) - with pytest.raises(AssertionError): - p.add_edge(edge_2) - - -@pytest.mark.graph -def test_nodes_and_edges(): - node_1 = node.Node(node_id=1) - node_2 = node.Node(node_id=2) - edge_1 = edge.Edge(node_1, None, node_2) - - p = path.Path.new_empty_path() - assert p.nodes() == [] - p.add_node(node_1) - assert [] == p.edges() - assert 0 == p.edge_count() - assert [node_1] == p.nodes() - assert node_1 == p.get_node(0) - assert node_1 == p.first_node() - assert node_1 == p.last_node() - assert 1 == p.nodes_count() - p.add_edge(edge_1) - assert [edge_1] == p.edges() - assert 1 == p.edge_count() - assert edge_1 == p.get_relationship(0) - p.add_node(node_2) - assert [node_1, node_2] == p.nodes() - assert node_1 == p.first_node() - assert node_2 == p.last_node() - assert 2 == p.nodes_count() - - -@pytest.mark.graph -def test_compare(): - node_1 = node.Node(node_id=1) - node_2 = node.Node(node_id=2) - edge_1 = edge.Edge(node_1, None, node_2) - - assert path.Path.new_empty_path() == path.Path.new_empty_path() - assert path.Path(nodes=[node_1, node_2], edges=[edge_1]) == path.Path( - nodes=[node_1, node_2], edges=[edge_1] - ) - assert path.Path(nodes=[node_1], edges=[]) != path.Path(nodes=[], edges=[]) - assert path.Path(nodes=[node_1], edges=[]) != path.Path(nodes=[], edges=[]) - assert path.Path(nodes=[node_1], edges=[]) != path.Path(nodes=[node_2], edges=[]) - assert path.Path(nodes=[node_1], edges=[edge_1]) != path.Path( - nodes=[node_1], edges=[] - ) - assert path.Path(nodes=[node_1], edges=[edge_1]) != path.Path( - nodes=[node_2], edges=[edge_1] - ) diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 06265d382e..367700547f 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -5,7 +5,6 @@ list_or_args, nativestr, parse_to_list, - quote_string, random_string, ) @@ -41,15 +40,3 @@ def test_random_string(): assert len(random_string(15)) == 15 for a in random_string(): assert a in string.ascii_lowercase - - -def test_quote_string(): - assert quote_string("hello world!") == '"hello world!"' - assert quote_string("") == '""' - assert quote_string("hello world!") == '"hello world!"' - assert quote_string("abc") == '"abc"' - assert quote_string("") == '""' - assert quote_string('"') == r'"\""' - assert quote_string(r"foo \ bar") == r'"foo \\ bar"' - assert quote_string(r"foo \" bar") == r'"foo \\\" bar"' - assert quote_string('a"a') == r'"a\"a"'