From 1c453f12bf5cd2909697b27ba9ee6bb8ebc975d4 Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Mon, 15 Nov 2021 10:26:41 +0100 Subject: [PATCH 01/21] test load --- tests/test_search.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/tests/test_search.py b/tests/test_search.py index d1fc75fb9d..70cfbff169 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -998,6 +998,29 @@ def test_aggregations(client): assert "RediSearch" == res[23] assert 2 == len(res[25]) +@pytest.mark.redismod +def test_load(client): + client.ft().create_index( + ( + TextField("t1"), + TextField("t2"), + ) + ) + + # Indexing a document + client.ft().add_document( + "doc1", + t1="hello", + t2="world", + ) + + req = aggregations.AggregateRequest("*").load("t1") + res = client.ft().aggregate(req) + assert res.rows[0] == ['t1', 'hello'] + + req = aggregations.AggregateRequest("*").load("t2") + res = client.ft().aggregate(req) + assert res.rows[0] == ['t2', 'world'] @pytest.mark.redismod @skip_ifmodversion_lt("2.0.0", "search") From 2231ea7003f8d27e2c539c515686588ee0221680 Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Mon, 15 Nov 2021 11:52:07 +0100 Subject: [PATCH 02/21] test sortby --- tests/test_search.py | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/tests/test_search.py b/tests/test_search.py index 70cfbff169..d7898c279b 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -998,6 +998,39 @@ def test_aggregations(client): assert "RediSearch" == res[23] assert 2 == len(res[25]) + +@pytest.mark.redismod +def test_sortby(client): + client.ft().create_index( + ( + TextField("t1"), + TextField("t2"), + ) + ) + + # Indexing documents + client.ft().add_document( + "doc1", + t1="a", + t2="b", + ) + client.ft().add_document( + "doc2", + t1="b", + t2="a", + ) + + req = aggregations.AggregateRequest("*").sort_by(aggregations.Asc("@t2"), aggregations.Desc("@t1")) + res = client.ft().aggregate(req) + assert res.rows[0] == ['t2', 'a', 't1', 'b'] + assert res.rows[1] == ['t2', 'b', 't1', 'a'] + + req = aggregations.AggregateRequest("*").sort_by(aggregations.Asc("@t1")) + res = client.ft().aggregate(req) + assert res.rows[0] == ['t1', 'a'] + assert res.rows[1] == ['t1', 'b'] + + @pytest.mark.redismod def test_load(client): client.ft().create_index( @@ -1022,6 +1055,7 @@ def test_load(client): res = client.ft().aggregate(req) assert res.rows[0] == ['t2', 'world'] + @pytest.mark.redismod @skip_ifmodversion_lt("2.0.0", "search") def test_index_definition(client): From e18b8456d107246f9bb78171ae6060c7a77646a7 Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Mon, 15 Nov 2021 11:59:35 +0100 Subject: [PATCH 03/21] linters --- tests/test_search.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/test_search.py b/tests/test_search.py index d7898c279b..73e762ad1e 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -1020,12 +1020,14 @@ def test_sortby(client): t2="a", ) - req = aggregations.AggregateRequest("*").sort_by(aggregations.Asc("@t2"), aggregations.Desc("@t1")) + req = aggregations.AggregateRequest("*")\ + .sort_by(aggregations.Asc("@t2"), aggregations.Desc("@t1")) res = client.ft().aggregate(req) assert res.rows[0] == ['t2', 'a', 't1', 'b'] assert res.rows[1] == ['t2', 'b', 't1', 'a'] - req = aggregations.AggregateRequest("*").sort_by(aggregations.Asc("@t1")) + req = aggregations.AggregateRequest("*")\ + .sort_by(aggregations.Asc("@t1")) res = client.ft().aggregate(req) assert res.rows[0] == 
['t1', 'a'] assert res.rows[1] == ['t1', 'b'] From dd400a3dea84daf03f58fcb677bee6a5260ee908 Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Mon, 15 Nov 2021 12:20:50 +0100 Subject: [PATCH 04/21] more cover for SortBy --- tests/test_search.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_search.py b/tests/test_search.py index 73e762ad1e..430a27b6c4 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -1000,7 +1000,7 @@ def test_aggregations(client): @pytest.mark.redismod -def test_sortby(client): +def test_aggregations_sort_by(client): client.ft().create_index( ( TextField("t1"), @@ -1027,14 +1027,14 @@ def test_sortby(client): assert res.rows[1] == ['t2', 'b', 't1', 'a'] req = aggregations.AggregateRequest("*")\ - .sort_by(aggregations.Asc("@t1")) + .sort_by("@t1", max=2) res = client.ft().aggregate(req) assert res.rows[0] == ['t1', 'a'] assert res.rows[1] == ['t1', 'b'] @pytest.mark.redismod -def test_load(client): +def test_aggregations_load(client): client.ft().create_index( ( TextField("t1"), From 4b9e2391e9b50924c8bcd28ec6580b6b26d80e07 Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Mon, 15 Nov 2021 12:43:18 +0100 Subject: [PATCH 05/21] change add_document to hset --- tests/test_search.py | 36 +++++++++++++++++++++--------------- 1 file changed, 21 insertions(+), 15 deletions(-) diff --git a/tests/test_search.py b/tests/test_search.py index 430a27b6c4..79f0f42b23 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -1008,17 +1008,8 @@ def test_aggregations_sort_by(client): ) ) - # Indexing documents - client.ft().add_document( - "doc1", - t1="a", - t2="b", - ) - client.ft().add_document( - "doc2", - t1="b", - t2="a", - ) + client.ft().client.hset("doc1", mapping={'t1': 'a', 't2': 'b'}) + client.ft().client.hset("doc2", mapping={'t1': 'b', 't2': 'a'}) req = aggregations.AggregateRequest("*")\ .sort_by(aggregations.Asc("@t2"), aggregations.Desc("@t1")) @@ -1042,6 +1033,25 @@ def test_aggregations_load(client): ) ) + client.ft().client.hset("doc1", mapping={'t1': 'hello', 't2': 'world'}) + + req = aggregations.AggregateRequest("*").load("t1") + res = client.ft().aggregate(req) + assert res.rows[0] == ['t1', 'hello'] + + req = aggregations.AggregateRequest("*").load("t2") + res = client.ft().aggregate(req) + assert res.rows[0] == ['t2', 'world'] + + +@pytest.mark.redismod +def test_aggregations_apply(client): + client.ft().create_index( + ( + NumericField("timestamp"), + ) + ) + # Indexing a document client.ft().add_document( "doc1", @@ -1053,10 +1063,6 @@ def test_aggregations_load(client): res = client.ft().aggregate(req) assert res.rows[0] == ['t1', 'hello'] - req = aggregations.AggregateRequest("*").load("t2") - res = client.ft().aggregate(req) - assert res.rows[0] == ['t2', 'world'] - @pytest.mark.redismod @skip_ifmodversion_lt("2.0.0", "search") From 2506fbba48157d3cdc0a0826805a4fa640a408ee Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Mon, 15 Nov 2021 12:56:26 +0100 Subject: [PATCH 06/21] test apply --- tests/test_search.py | 25 ++++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/tests/test_search.py b/tests/test_search.py index 79f0f42b23..0389887ae5 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -1048,20 +1048,31 @@ def test_aggregations_load(client): def test_aggregations_apply(client): client.ft().create_index( ( - NumericField("timestamp"), + TextField("PrimaryKey", sortable=True), + NumericField("CreatedDateTimeUTC", sortable=True), ) ) - # 
Indexing a document - client.ft().add_document( + client.ft().client.hset( "doc1", - t1="hello", - t2="world", + mapping={ + 'PrimaryKey': '9::362330', + 'CreatedDateTimeUTC': '637387878524969984' + } + ) + client.ft().client.hset( + "doc2", + mapping={ + 'PrimaryKey': '9::362329', + 'CreatedDateTimeUTC': '637387875859270016' + } ) - req = aggregations.AggregateRequest("*").load("t1") + req = aggregations.AggregateRequest("*")\ + .apply(CreatedDateTimeUTC='@CreatedDateTimeUTC * 10') res = client.ft().aggregate(req) - assert res.rows[0] == ['t1', 'hello'] + assert res.rows[0] == ['CreatedDateTimeUTC', '6373878785249699840'] + assert res.rows[1] == ['CreatedDateTimeUTC', '6373878758592700416'] @pytest.mark.redismod From c2d1e50f50fd36a3706781904d0f25a15d4851bf Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Mon, 15 Nov 2021 13:06:15 +0100 Subject: [PATCH 07/21] test limit --- tests/test_search.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/tests/test_search.py b/tests/test_search.py index 0389887ae5..0808211e84 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -1000,7 +1000,7 @@ def test_aggregations(client): @pytest.mark.redismod -def test_aggregations_sort_by(client): +def test_aggregations_sort_by_and_limit(client): client.ft().create_index( ( TextField("t1"), @@ -1023,6 +1023,12 @@ def test_aggregations_sort_by(client): assert res.rows[0] == ['t1', 'a'] assert res.rows[1] == ['t1', 'b'] + req = aggregations.AggregateRequest("*")\ + .sort_by("@t1").limit(1, 1) + res = client.ft().aggregate(req) + assert len(res.rows) == 1 + assert res.rows[0] == ['t1', 'b'] + @pytest.mark.redismod def test_aggregations_load(client): From 3b57b1782383da10d12e7c0ab16415fc3bd561c7 Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Mon, 15 Nov 2021 13:08:24 +0100 Subject: [PATCH 08/21] comments --- tests/test_search.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/test_search.py b/tests/test_search.py index 0808211e84..f5f2b72f03 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -1011,18 +1011,21 @@ def test_aggregations_sort_by_and_limit(client): client.ft().client.hset("doc1", mapping={'t1': 'a', 't2': 'b'}) client.ft().client.hset("doc2", mapping={'t1': 'b', 't2': 'a'}) + # test sort_by using SortDirection req = aggregations.AggregateRequest("*")\ .sort_by(aggregations.Asc("@t2"), aggregations.Desc("@t1")) res = client.ft().aggregate(req) assert res.rows[0] == ['t2', 'a', 't1', 'b'] assert res.rows[1] == ['t2', 'b', 't1', 'a'] + # test sort_by without SortDirection and with max req = aggregations.AggregateRequest("*")\ .sort_by("@t1", max=2) res = client.ft().aggregate(req) assert res.rows[0] == ['t1', 'a'] assert res.rows[1] == ['t1', 'b'] + # test limit req = aggregations.AggregateRequest("*")\ .sort_by("@t1").limit(1, 1) res = client.ft().aggregate(req) @@ -1041,10 +1044,12 @@ def test_aggregations_load(client): client.ft().client.hset("doc1", mapping={'t1': 'hello', 't2': 'world'}) + # load t1 req = aggregations.AggregateRequest("*").load("t1") res = client.ft().aggregate(req) assert res.rows[0] == ['t1', 'hello'] + # load t2 req = aggregations.AggregateRequest("*").load("t2") res = client.ft().aggregate(req) assert res.rows[0] == ['t2', 'world'] From b78923d19dc11f04e19ebcaa566e9309f235961b Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Mon, 15 Nov 2021 13:09:49 +0100 Subject: [PATCH 09/21] comments --- tests/test_search.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git 
a/tests/test_search.py b/tests/test_search.py index f5f2b72f03..d7cb8e3328 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -1018,13 +1018,19 @@ def test_aggregations_sort_by_and_limit(client): assert res.rows[0] == ['t2', 'a', 't1', 'b'] assert res.rows[1] == ['t2', 'b', 't1', 'a'] - # test sort_by without SortDirection and with max + # test sort_by without SortDirection req = aggregations.AggregateRequest("*")\ - .sort_by("@t1", max=2) + .sort_by("@t1") res = client.ft().aggregate(req) assert res.rows[0] == ['t1', 'a'] assert res.rows[1] == ['t1', 'b'] + # test sort_by with max + req = aggregations.AggregateRequest("*")\ + .sort_by("@t1", max=1) + res = client.ft().aggregate(req) + assert len(res.rows) == 1 + # test limit req = aggregations.AggregateRequest("*")\ .sort_by("@t1").limit(1, 1) From 3aaaaa1972aaaf659d3ee0962e4948192c8a001a Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Mon, 15 Nov 2021 13:19:14 +0100 Subject: [PATCH 10/21] test filter --- tests/test_search.py | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/tests/test_search.py b/tests/test_search.py index d7cb8e3328..5e7107bbbc 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -1092,6 +1092,43 @@ def test_aggregations_apply(client): assert res.rows[1] == ['CreatedDateTimeUTC', '6373878758592700416'] +@pytest.mark.redismod +def test_aggregations_filter(client): + client.ft().create_index( + ( + TextField("name", sortable=True), + NumericField("age", sortable=True), + ) + ) + + client.ft().client.hset( + "doc1", + mapping={ + 'name': 'bar', + 'age': '25' + } + ) + client.ft().client.hset( + "doc2", + mapping={ + 'name': 'foo', + 'age': '19' + } + ) + + req = aggregations.AggregateRequest("*")\ + .filter("@name=='foo' && @age < 20") + res = client.ft().aggregate(req) + assert len(res.rows) == 1 + assert res.rows[0] == ['name', 'foo', 'age', '19'] + + req = aggregations.AggregateRequest("*")\ + .filter("@age > 15").sort_by("@age") + res = client.ft().aggregate(req) + assert len(res.rows) == 2 + assert res.rows[0] == ['age', '19'] + assert res.rows[1] == ['age', '25'] + @pytest.mark.redismod @skip_ifmodversion_lt("2.0.0", "search") def test_index_definition(client): From 698466f4fb76775cde14f684c15c6f5de8ee1b6e Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Mon, 15 Nov 2021 14:33:08 +0100 Subject: [PATCH 11/21] test filter --- redis/commands/search/aggregation.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/redis/commands/search/aggregation.py b/redis/commands/search/aggregation.py index b391d1f55b..3d71329c44 100644 --- a/redis/commands/search/aggregation.py +++ b/redis/commands/search/aggregation.py @@ -345,12 +345,6 @@ def cursor(self, count=0, max_idle=0.0): self._cursor = args return self - def _limit_2_args(self, limit): - if limit[1]: - return ["LIMIT"] + [str(x) for x in limit] - else: - return [] - def build_args(self): # @foo:bar ... 
ret = [self._query] From 0f68afd231c9c2094c8eb2451be11f827ee987de Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Mon, 15 Nov 2021 14:35:11 +0100 Subject: [PATCH 12/21] flake8 --- tests/test_search.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_search.py b/tests/test_search.py index 5e7107bbbc..06c04fac8b 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -1129,6 +1129,7 @@ def test_aggregations_filter(client): assert res.rows[0] == ['age', '19'] assert res.rows[1] == ['age', '25'] + @pytest.mark.redismod @skip_ifmodversion_lt("2.0.0", "search") def test_index_definition(client): From 151d14b43fbceb9c4bf340e6027f196752a86416 Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Mon, 15 Nov 2021 15:04:26 +0100 Subject: [PATCH 13/21] split groupby test --- tests/test_search.py | 226 +++++++++++++++++++++++++++++-------------- 1 file changed, 151 insertions(+), 75 deletions(-) diff --git a/tests/test_search.py b/tests/test_search.py index 06c04fac8b..15c2cd1399 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -82,8 +82,8 @@ def createIndex(client, num_docs=100, definition=None): try: client.create_index( (TextField("play", weight=5.0), - TextField("txt"), - NumericField("chapter")), + TextField("txt"), + NumericField("chapter")), definition=definition, ) except redis.ResponseError: @@ -192,8 +192,8 @@ def test_client(client): ) both_total = ( client.ft() - .search(Query("henry").no_content().limit_fields("play", "txt")) - .total + .search(Query("henry").no_content().limit_fields("play", "txt")) + .total ) assert 129 == txt_total assert 494 == play_total @@ -320,8 +320,8 @@ def test_stopwords(client): def test_filters(client): client.ft().create_index( (TextField("txt"), - NumericField("num"), - GeoField("loc")) + NumericField("num"), + GeoField("loc")) ) client.ft().add_document( "doc1", @@ -336,9 +336,9 @@ def test_filters(client): q1 = Query("foo").add_filter(NumericFilter("num", 0, 2)).no_content() q2 = ( Query("foo") - .add_filter( + .add_filter( NumericFilter("num", 2, NumericFilter.INF, minExclusive=True)) - .no_content() + .no_content() ) res1, res2 = client.ft().search(q1), client.ft().search(q2) @@ -379,7 +379,7 @@ def test_payloads_with_no_content(client): def test_sort_by(client): client.ft().create_index( (TextField("txt"), - NumericField("num", sortable=True)) + NumericField("num", sortable=True)) ) client.ft().add_document("doc1", txt="foo bar", num=1) client.ft().add_document("doc2", txt="foo baz", num=2) @@ -424,7 +424,7 @@ def test_example(client): # Creating the index definition and schema client.ft().create_index( (TextField("title", weight=5.0), - TextField("body")) + TextField("body")) ) # Indexing a document @@ -552,8 +552,8 @@ def test_no_index(client): def test_partial(client): client.ft().create_index( (TextField("f1"), - TextField("f2"), - TextField("f3")) + TextField("f2"), + TextField("f3")) ) client.ft().add_document("doc1", f1="f1_val", f2="f2_val") client.ft().add_document("doc2", f1="f1_val", f2="f2_val") @@ -574,8 +574,8 @@ def test_partial(client): def test_no_create(client): client.ft().create_index( (TextField("f1"), - TextField("f2"), - TextField("f3")) + TextField("f2"), + TextField("f3")) ) client.ft().add_document("doc1", f1="f1_val", f2="f2_val") client.ft().add_document("doc2", f1="f1_val", f2="f2_val") @@ -604,8 +604,8 @@ def test_no_create(client): def test_explain(client): client.ft().create_index( (TextField("f1"), - TextField("f2"), - TextField("f3")) + TextField("f2"), + TextField("f3")) ) res = 
client.ft().explain("@f3:f3_val @f2:f2_val @f1:f1_val") assert res @@ -629,8 +629,8 @@ def test_summarize(client): doc = sorted(client.ft().search(q).docs)[0] assert "Henry IV" == doc.play assert ( - "ACT I SCENE I. London. The palace. Enter KING HENRY, LORD JOHN OF LANCASTER, the EARL of WESTMORELAND, SIR... " # noqa - == doc.txt + "ACT I SCENE I. London. The palace. Enter KING HENRY, LORD JOHN OF LANCASTER, the EARL of WESTMORELAND, SIR... " # noqa + == doc.txt ) q = Query("king henry").paging(0, 1).summarize().highlight() @@ -638,8 +638,8 @@ def test_summarize(client): doc = sorted(client.ft().search(q).docs)[0] assert "Henry ... " == doc.play assert ( - "ACT I SCENE I. London. The palace. Enter KING HENRY, LORD JOHN OF LANCASTER, the EARL of WESTMORELAND, SIR... " # noqa - == doc.txt + "ACT I SCENE I. London. The palace. Enter KING HENRY, LORD JOHN OF LANCASTER, the EARL of WESTMORELAND, SIR... " # noqa + == doc.txt ) @@ -812,10 +812,10 @@ def test_spell_check(client): res = client.ft().spellcheck("lorm", include="dict") assert len(res["lorm"]) == 3 assert ( - res["lorm"][0]["suggestion"], - res["lorm"][1]["suggestion"], - res["lorm"][2]["suggestion"], - ) == ("lorem", "lore", "lorm") + res["lorm"][0]["suggestion"], + res["lorm"][1]["suggestion"], + res["lorm"][2]["suggestion"], + ) == ("lorem", "lore", "lorm") assert (res["lorm"][0]["score"], res["lorm"][1]["score"]) == ("0.5", "0") # test spellcheck exclude @@ -873,7 +873,8 @@ def test_scorer(client): ) client.ft().add_document( "doc2", - description="Quick alice was beginning to get very tired of sitting by her quick sister on the bank, and of having nothing to do.", # noqa + description="Quick alice was beginning to get very tired of sitting by her quick sister on the bank, and of having nothing to do.", + # noqa ) # default scorer is TFIDF @@ -909,12 +910,12 @@ def test_get(client): ) assert [ - ["f1", "some valid content dd2", "f2", "this is sample text ff2"] - ] == client.ft().get("doc2") + ["f1", "some valid content dd2", "f2", "this is sample text ff2"] + ] == client.ft().get("doc2") assert [ - ["f1", "some valid content dd1", "f2", "this is sample text ff1"], - ["f1", "some valid content dd2", "f2", "this is sample text ff2"], - ] == client.ft().get("doc1", "doc2") + ["f1", "some valid content dd1", "f2", "this is sample text ff1"], + ["f1", "some valid content dd2", "f2", "this is sample text ff2"], + ] == client.ft().get("doc1", "doc2") @pytest.mark.redismod @@ -930,7 +931,7 @@ def test_config(client): @pytest.mark.redismod -def test_aggregations(client): +def test_aggregations_groupby(client): # Creating the index definition and schema client.ft().create_index( ( @@ -967,36 +968,111 @@ def test_aggregations(client): req = aggregations.AggregateRequest("redis").group_by( "@parent", reducers.count(), + ) + + res = client.ft().aggregate(req).rows[0] + assert res[1] == "redis" + assert res[3] == "3" + + req = aggregations.AggregateRequest("redis").group_by( + "@parent", reducers.count_distinct("@title"), + ) + + res = client.ft().aggregate(req).rows[0] + assert res[1] == "redis" + assert res[3] == "3" + + req = aggregations.AggregateRequest("redis").group_by( + "@parent", reducers.count_distinctish("@title"), + ) + + res = client.ft().aggregate(req).rows[0] + assert res[1] == "redis" + assert res[3] == "3" + + req = aggregations.AggregateRequest("redis").group_by( + "@parent", reducers.sum("@random_num"), + ) + + res = client.ft().aggregate(req).rows[0] + assert res[1] == "redis" + assert res[3] == "21" # 10+8+3 + + req = 
aggregations.AggregateRequest("redis").group_by( + "@parent", reducers.min("@random_num"), + ) + + res = client.ft().aggregate(req).rows[0] + assert res[1] == "redis" + assert res[3] == "3" # min(10,8,3) + + req = aggregations.AggregateRequest("redis").group_by( + "@parent", reducers.max("@random_num"), + ) + + res = client.ft().aggregate(req).rows[0] + assert res[1] == "redis" + assert res[3] == "10" # max(10,8,3) + + req = aggregations.AggregateRequest("redis").group_by( + "@parent", reducers.avg("@random_num"), + ) + + res = client.ft().aggregate(req).rows[0] + assert res[1] == "redis" + assert res[3] == "7" # (10+3+8)/3 + + req = aggregations.AggregateRequest("redis").group_by( + "@parent", reducers.stddev("random_num"), + ) + + res = client.ft().aggregate(req).rows[0] + assert res[1] == "redis" + assert res[3] == "3.60555127546" + + req = aggregations.AggregateRequest("redis").group_by( + "@parent", reducers.quantile("@random_num", 0.5), + ) + + res = client.ft().aggregate(req).rows[0] + assert res[1] == "redis" + assert res[3] == "10" + + req = aggregations.AggregateRequest("redis").group_by( + "@parent", reducers.tolist("@title"), - reducers.first_value("@title"), - reducers.random_sample("@title", 2), ) - res = client.ft().aggregate(req) + res = client.ft().aggregate(req).rows[0] + assert res[1] == "redis" + assert res[3] == ["RediSearch", "RedisAI", "RedisJson"] + + req = aggregations.AggregateRequest("redis").group_by( + "@parent", + reducers.first_value("@title").alias("first"), + ) + + res = client.ft().aggregate(req).rows[0] + assert res == ['parent', 'redis', 'first', 'RediSearch'] + + req = aggregations.AggregateRequest("redis").group_by( + "@parent", + reducers.random_sample("@title", 2).alias("random"), + ) - res = res.rows[0] - assert len(res) == 26 - assert "redis" == res[1] - assert "3" == res[3] - assert "3" == res[5] - assert "3" == res[7] - assert "21" == res[9] - assert "3" == res[11] - assert "10" == res[13] - assert "7" == res[15] - assert "3.60555127546" == res[17] - assert "10" == res[19] - assert ["RediSearch", "RedisAI", "RedisJson"] == res[21] - assert "RediSearch" == res[23] - assert 2 == len(res[25]) + res = client.ft().aggregate(req).rows[0] + assert res[1] == "redis" + assert res[2] == "random" + assert len(res[3]) == 2 + assert res[3][0] in ["RediSearch", "RedisAI", "RedisJson"] @pytest.mark.redismod @@ -1012,27 +1088,27 @@ def test_aggregations_sort_by_and_limit(client): client.ft().client.hset("doc2", mapping={'t1': 'b', 't2': 'a'}) # test sort_by using SortDirection - req = aggregations.AggregateRequest("*")\ + req = aggregations.AggregateRequest("*") \ .sort_by(aggregations.Asc("@t2"), aggregations.Desc("@t1")) res = client.ft().aggregate(req) assert res.rows[0] == ['t2', 'a', 't1', 'b'] assert res.rows[1] == ['t2', 'b', 't1', 'a'] # test sort_by without SortDirection - req = aggregations.AggregateRequest("*")\ + req = aggregations.AggregateRequest("*") \ .sort_by("@t1") res = client.ft().aggregate(req) assert res.rows[0] == ['t1', 'a'] assert res.rows[1] == ['t1', 'b'] # test sort_by with max - req = aggregations.AggregateRequest("*")\ + req = aggregations.AggregateRequest("*") \ .sort_by("@t1", max=1) res = client.ft().aggregate(req) assert len(res.rows) == 1 # test limit - req = aggregations.AggregateRequest("*")\ + req = aggregations.AggregateRequest("*") \ .sort_by("@t1").limit(1, 1) res = client.ft().aggregate(req) assert len(res.rows) == 1 @@ -1085,7 +1161,7 @@ def test_aggregations_apply(client): } ) - req = 
aggregations.AggregateRequest("*")\ + req = aggregations.AggregateRequest("*") \ .apply(CreatedDateTimeUTC='@CreatedDateTimeUTC * 10') res = client.ft().aggregate(req) assert res.rows[0] == ['CreatedDateTimeUTC', '6373878785249699840'] @@ -1116,13 +1192,13 @@ def test_aggregations_filter(client): } ) - req = aggregations.AggregateRequest("*")\ + req = aggregations.AggregateRequest("*") \ .filter("@name=='foo' && @age < 20") res = client.ft().aggregate(req) assert len(res.rows) == 1 assert res.rows[0] == ['name', 'foo', 'age', '19'] - req = aggregations.AggregateRequest("*")\ + req = aggregations.AggregateRequest("*") \ .filter("@age > 15").sort_by("@age") res = client.ft().aggregate(req) assert len(res.rows) == 2 @@ -1151,25 +1227,25 @@ def test_index_definition(client): ) assert [ - "ON", - "JSON", - "PREFIX", - 2, - "hset:", - "henry", - "FILTER", - "@f1==32", - "LANGUAGE_FIELD", - "play", - "LANGUAGE", - "English", - "SCORE_FIELD", - "chapter", - "SCORE", - 0.5, - "PAYLOAD_FIELD", - "txt", - ] == definition.args + "ON", + "JSON", + "PREFIX", + 2, + "hset:", + "henry", + "FILTER", + "@f1==32", + "LANGUAGE_FIELD", + "play", + "LANGUAGE", + "English", + "SCORE_FIELD", + "chapter", + "SCORE", + 0.5, + "PAYLOAD_FIELD", + "txt", + ] == definition.args createIndex(client.ft(), num_docs=500, definition=definition) From c9f291dfb8ac3d0c2b905dd8026d242a4b2ffda1 Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Mon, 15 Nov 2021 15:09:51 +0100 Subject: [PATCH 14/21] flake8 --- tests/test_search.py | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/tests/test_search.py b/tests/test_search.py index 15c2cd1399..0cba3b74f6 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -192,8 +192,8 @@ def test_client(client): ) both_total = ( client.ft() - .search(Query("henry").no_content().limit_fields("play", "txt")) - .total + .search(Query("henry").no_content().limit_fields("play", "txt")) + .total ) assert 129 == txt_total assert 494 == play_total @@ -336,9 +336,9 @@ def test_filters(client): q1 = Query("foo").add_filter(NumericFilter("num", 0, 2)).no_content() q2 = ( Query("foo") - .add_filter( + .add_filter( NumericFilter("num", 2, NumericFilter.INF, minExclusive=True)) - .no_content() + .no_content() ) res1, res2 = client.ft().search(q1), client.ft().search(q2) @@ -873,8 +873,7 @@ def test_scorer(client): ) client.ft().add_document( "doc2", - description="Quick alice was beginning to get very tired of sitting by her quick sister on the bank, and of having nothing to do.", - # noqa + description="Quick alice was beginning to get very tired of sitting by her quick sister on the bank, and of having nothing to do.", # noqa ) # default scorer is TFIDF @@ -910,12 +909,12 @@ def test_get(client): ) assert [ - ["f1", "some valid content dd2", "f2", "this is sample text ff2"] - ] == client.ft().get("doc2") + ["f1", "some valid content dd2", "f2", "this is sample text ff2"] + ] == client.ft().get("doc2") assert [ - ["f1", "some valid content dd1", "f2", "this is sample text ff1"], - ["f1", "some valid content dd2", "f2", "this is sample text ff2"], - ] == client.ft().get("doc1", "doc2") + ["f1", "some valid content dd1", "f2", "this is sample text ff1"], + ["f1", "some valid content dd2", "f2", "this is sample text ff2"], + ] == client.ft().get("doc1", "doc2") @pytest.mark.redismod From 5e02ec98ea91cd868642cdfdd900c4a158139314 Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Mon, 15 Nov 2021 16:10:26 +0100 Subject: [PATCH 15/21] support load_all --- 
redis/commands/search/aggregation.py | 14 +++++++++++--- tests/test_search.py | 5 +++++ 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/redis/commands/search/aggregation.py b/redis/commands/search/aggregation.py index 3d71329c44..4d0b52a63d 100644 --- a/redis/commands/search/aggregation.py +++ b/redis/commands/search/aggregation.py @@ -172,6 +172,7 @@ def __init__(self, query="*"): self._query = query self._aggregateplan = [] self._loadfields = [] + self._loadall = False self._limit = Limit() self._max = 0 self._with_schema = False @@ -185,9 +186,13 @@ def load(self, *fields): ### Parameters - - **fields**: One or more fields in the format of `@field` + - **fields**: If fields not specified, all the fields will be loaded. + Otherwise, should be given one or more fields in the format of `@field`. """ - self._loadfields.extend(fields) + if fields: + self._loadfields.extend(fields) + else: + self._loadall = True return self def group_by(self, fields, *reducers): @@ -358,7 +363,10 @@ def build_args(self): if self._cursor: ret += self._cursor - if self._loadfields: + if self._loadall: + ret.append("LOAD") + ret.append("*") + elif self._loadfields: ret.append("LOAD") ret.append(str(len(self._loadfields))) ret.extend(self._loadfields) diff --git a/tests/test_search.py b/tests/test_search.py index 0cba3b74f6..59732c09bc 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -1135,6 +1135,11 @@ def test_aggregations_load(client): res = client.ft().aggregate(req) assert res.rows[0] == ['t2', 'world'] + # load all + req = aggregations.AggregateRequest("*").load() + res = client.ft().aggregate(req) + assert res.rows[0] == ['t1', 'hello', 't2', 'world'] + @pytest.mark.redismod def test_aggregations_apply(client): From a87f9b54a9c1431bd028f9b3bc76a8dbe2c71fa9 Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Mon, 22 Nov 2021 08:36:16 +0100 Subject: [PATCH 16/21] merge --- tests/test_helpers.py | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 467e00c1fd..402eccf0a2 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -5,7 +5,8 @@ nativestr, parse_to_list, quote_string, - random_string + random_string, + parse_to_dict ) @@ -19,11 +20,34 @@ def test_list_or_args(): def test_parse_to_list(): + assert parse_to_list(None) == [] r = ["hello", b"my name", "45", "555.55", "is simon!", None] assert parse_to_list(r) == \ ["hello", "my name", 45, 555.55, "is simon!", None] +def test_parse_to_dict(): + assert parse_to_dict(None) == {} + r = [['Some number', '1.0345'], + ['Some string', 'hello'], + ['Child iterators', + ['Time', '0.2089', 'Counter', 3, 'Child iterators', + ['Type', 'bar', 'Time', '0.0729', 'Counter', 3], + ['Type', 'barbar', 'Time', '0.058', 'Counter', 3]]]] + assert parse_to_dict(r) == { + 'Child iterators': { + 'Child iterators': [ + {'Counter': 3.0, 'Time': 0.0729, 'Type': 'bar'}, + {'Counter': 3.0, 'Time': 0.058, 'Type': 'barbar'} + ], + 'Counter': 3.0, + 'Time': 0.2089 + }, + 'Some number': 1.0345, + 'Some string': 'hello' + } + + def test_nativestr(): assert nativestr('teststr') == 'teststr' assert nativestr(b'teststr') == 'teststr' From cebe58764b6b02922ccb089945cda7abcda6621f Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Mon, 22 Nov 2021 08:37:16 +0100 Subject: [PATCH 17/21] flake8 --- redis/commands/search/aggregation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/redis/commands/search/aggregation.py 
b/redis/commands/search/aggregation.py index 4d0b52a63d..31d56b0b68 100644 --- a/redis/commands/search/aggregation.py +++ b/redis/commands/search/aggregation.py @@ -187,7 +187,7 @@ def load(self, *fields): ### Parameters - **fields**: If fields not specified, all the fields will be loaded. - Otherwise, should be given one or more fields in the format of `@field`. + Otherwise, fields should be given in the format of `@field`. """ if fields: self._loadfields.extend(fields) From df495005e7fe55e67b56c0f7c2735a28983d97d2 Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Mon, 22 Nov 2021 09:54:12 +0100 Subject: [PATCH 18/21] fix fail --- tests/test_helpers.py | 26 +------------------------- 1 file changed, 1 insertion(+), 25 deletions(-) diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 402eccf0a2..467e00c1fd 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -5,8 +5,7 @@ nativestr, parse_to_list, quote_string, - random_string, - parse_to_dict + random_string ) @@ -20,34 +19,11 @@ def test_list_or_args(): def test_parse_to_list(): - assert parse_to_list(None) == [] r = ["hello", b"my name", "45", "555.55", "is simon!", None] assert parse_to_list(r) == \ ["hello", "my name", 45, 555.55, "is simon!", None] -def test_parse_to_dict(): - assert parse_to_dict(None) == {} - r = [['Some number', '1.0345'], - ['Some string', 'hello'], - ['Child iterators', - ['Time', '0.2089', 'Counter', 3, 'Child iterators', - ['Type', 'bar', 'Time', '0.0729', 'Counter', 3], - ['Type', 'barbar', 'Time', '0.058', 'Counter', 3]]]] - assert parse_to_dict(r) == { - 'Child iterators': { - 'Child iterators': [ - {'Counter': 3.0, 'Time': 0.0729, 'Type': 'bar'}, - {'Counter': 3.0, 'Time': 0.058, 'Type': 'barbar'} - ], - 'Counter': 3.0, - 'Time': 0.2089 - }, - 'Some number': 1.0345, - 'Some string': 'hello' - } - - def test_nativestr(): assert nativestr('teststr') == 'teststr' assert nativestr(b'teststr') == 'teststr' From e26e5c05e341496e3ca4deb12d6477aa8dafc5bc Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Wed, 1 Dec 2021 19:46:24 +0100 Subject: [PATCH 19/21] linters --- tests/test_search.py | 184 ++++++++++++++++--------------------------- 1 file changed, 67 insertions(+), 117 deletions(-) diff --git a/tests/test_search.py b/tests/test_search.py index 9402677462..40ba710f01 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -61,9 +61,7 @@ def getClient(): def createIndex(client, num_docs=100, definition=None): try: client.create_index( - (TextField("play", weight=5.0), - TextField("txt"), - NumericField("chapter")), + (TextField("play", weight=5.0), TextField("txt"), NumericField("chapter")), definition=definition, ) except redis.ResponseError: @@ -286,17 +284,8 @@ def test_stopwords(client): @pytest.mark.redismod def test_filters(client): - client.ft().create_index( - (TextField("txt"), - NumericField("num"), - GeoField("loc")) - ) - client.ft().add_document( - "doc1", - txt="foo bar", - num=3.141, - loc="-0.441,51.458" - ) + client.ft().create_index((TextField("txt"), NumericField("num"), GeoField("loc"))) + client.ft().add_document("doc1", txt="foo bar", num=3.141, loc="-0.441,51.458") client.ft().add_document("doc2", txt="foo baz", num=2, loc="-0.1,51.2") waitForIndex(client, "idx") @@ -342,10 +331,7 @@ def test_payloads_with_no_content(client): @pytest.mark.redismod def test_sort_by(client): - client.ft().create_index( - (TextField("txt"), - NumericField("num", sortable=True)) - ) + client.ft().create_index((TextField("txt"), NumericField("num", sortable=True))) 
client.ft().add_document("doc1", txt="foo bar", num=1) client.ft().add_document("doc2", txt="foo baz", num=2) client.ft().add_document("doc3", txt="foo qux", num=3) @@ -387,10 +373,7 @@ def test_drop_index(): @pytest.mark.redismod def test_example(client): # Creating the index definition and schema - client.ft().create_index( - (TextField("title", weight=5.0), - TextField("body")) - ) + client.ft().create_index((TextField("title", weight=5.0), TextField("body"))) # Indexing a document client.ft().add_document( @@ -510,11 +493,7 @@ def test_no_index(client): @pytest.mark.redismod def test_partial(client): - client.ft().create_index( - (TextField("f1"), - TextField("f2"), - TextField("f3")) - ) + client.ft().create_index((TextField("f1"), TextField("f2"), TextField("f3"))) client.ft().add_document("doc1", f1="f1_val", f2="f2_val") client.ft().add_document("doc2", f1="f1_val", f2="f2_val") client.ft().add_document("doc1", f3="f3_val", partial=True) @@ -532,11 +511,7 @@ def test_partial(client): @pytest.mark.redismod def test_no_create(client): - client.ft().create_index( - (TextField("f1"), - TextField("f2"), - TextField("f3")) - ) + client.ft().create_index((TextField("f1"), TextField("f2"), TextField("f3"))) client.ft().add_document("doc1", f1="f1_val", f2="f2_val") client.ft().add_document("doc2", f1="f1_val", f2="f2_val") client.ft().add_document("doc1", f3="f3_val", no_create=True) @@ -557,11 +532,7 @@ def test_no_create(client): @pytest.mark.redismod def test_explain(client): - client.ft().create_index( - (TextField("f1"), - TextField("f2"), - TextField("f3")) - ) + client.ft().create_index((TextField("f1"), TextField("f2"), TextField("f3"))) res = client.ft().explain("@f3:f3_val @f2:f2_val @f1:f1_val") assert res @@ -584,8 +555,8 @@ def test_summarize(client): doc = sorted(client.ft().search(q).docs)[0] assert "Henry IV" == doc.play assert ( - "ACT I SCENE I. London. The palace. Enter KING HENRY, LORD JOHN OF LANCASTER, the EARL of WESTMORELAND, SIR... " # noqa - == doc.txt + "ACT I SCENE I. London. The palace. Enter KING HENRY, LORD JOHN OF LANCASTER, the EARL of WESTMORELAND, SIR... " # noqa + == doc.txt ) q = Query("king henry").paging(0, 1).summarize().highlight() @@ -593,8 +564,8 @@ def test_summarize(client): doc = sorted(client.ft().search(q).docs)[0] assert "Henry ... " == doc.play assert ( - "ACT I SCENE I. London. The palace. Enter KING HENRY, LORD JOHN OF LANCASTER, the EARL of WESTMORELAND, SIR... " # noqa - == doc.txt + "ACT I SCENE I. London. The palace. Enter KING HENRY, LORD JOHN OF LANCASTER, the EARL of WESTMORELAND, SIR... 
" # noqa + == doc.txt ) @@ -763,10 +734,10 @@ def test_spell_check(client): res = client.ft().spellcheck("lorm", include="dict") assert len(res["lorm"]) == 3 assert ( - res["lorm"][0]["suggestion"], - res["lorm"][1]["suggestion"], - res["lorm"][2]["suggestion"], - ) == ("lorem", "lore", "lorm") + res["lorm"][0]["suggestion"], + res["lorm"][1]["suggestion"], + res["lorm"][2]["suggestion"], + ) == ("lorem", "lore", "lorm") assert (res["lorm"][0]["score"], res["lorm"][1]["score"]) == ("0.5", "0") # test spellcheck exclude @@ -1010,7 +981,7 @@ def test_aggregations_groupby(client): ) res = client.ft().aggregate(req).rows[0] - assert res == ['parent', 'redis', 'first', 'RediSearch'] + assert res == ["parent", "redis", "first", "RediSearch"] req = aggregations.AggregateRequest("redis").group_by( "@parent", @@ -1033,35 +1004,33 @@ def test_aggregations_sort_by_and_limit(client): ) ) - client.ft().client.hset("doc1", mapping={'t1': 'a', 't2': 'b'}) - client.ft().client.hset("doc2", mapping={'t1': 'b', 't2': 'a'}) + client.ft().client.hset("doc1", mapping={"t1": "a", "t2": "b"}) + client.ft().client.hset("doc2", mapping={"t1": "b", "t2": "a"}) # test sort_by using SortDirection - req = aggregations.AggregateRequest("*") \ - .sort_by(aggregations.Asc("@t2"), aggregations.Desc("@t1")) + req = aggregations.AggregateRequest("*").sort_by( + aggregations.Asc("@t2"), aggregations.Desc("@t1") + ) res = client.ft().aggregate(req) - assert res.rows[0] == ['t2', 'a', 't1', 'b'] - assert res.rows[1] == ['t2', 'b', 't1', 'a'] + assert res.rows[0] == ["t2", "a", "t1", "b"] + assert res.rows[1] == ["t2", "b", "t1", "a"] # test sort_by without SortDirection - req = aggregations.AggregateRequest("*") \ - .sort_by("@t1") + req = aggregations.AggregateRequest("*").sort_by("@t1") res = client.ft().aggregate(req) - assert res.rows[0] == ['t1', 'a'] - assert res.rows[1] == ['t1', 'b'] + assert res.rows[0] == ["t1", "a"] + assert res.rows[1] == ["t1", "b"] # test sort_by with max - req = aggregations.AggregateRequest("*") \ - .sort_by("@t1", max=1) + req = aggregations.AggregateRequest("*").sort_by("@t1", max=1) res = client.ft().aggregate(req) assert len(res.rows) == 1 # test limit - req = aggregations.AggregateRequest("*") \ - .sort_by("@t1").limit(1, 1) + req = aggregations.AggregateRequest("*").sort_by("@t1").limit(1, 1) res = client.ft().aggregate(req) assert len(res.rows) == 1 - assert res.rows[0] == ['t1', 'b'] + assert res.rows[0] == ["t1", "b"] @pytest.mark.redismod @@ -1073,22 +1042,22 @@ def test_aggregations_load(client): ) ) - client.ft().client.hset("doc1", mapping={'t1': 'hello', 't2': 'world'}) + client.ft().client.hset("doc1", mapping={"t1": "hello", "t2": "world"}) # load t1 req = aggregations.AggregateRequest("*").load("t1") res = client.ft().aggregate(req) - assert res.rows[0] == ['t1', 'hello'] + assert res.rows[0] == ["t1", "hello"] # load t2 req = aggregations.AggregateRequest("*").load("t2") res = client.ft().aggregate(req) - assert res.rows[0] == ['t2', 'world'] + assert res.rows[0] == ["t2", "world"] # load all req = aggregations.AggregateRequest("*").load() res = client.ft().aggregate(req) - assert res.rows[0] == ['t1', 'hello', 't2', 'world'] + assert res.rows[0] == ["t1", "hello", "t2", "world"] @pytest.mark.redismod @@ -1102,24 +1071,19 @@ def test_aggregations_apply(client): client.ft().client.hset( "doc1", - mapping={ - 'PrimaryKey': '9::362330', - 'CreatedDateTimeUTC': '637387878524969984' - } + mapping={"PrimaryKey": "9::362330", "CreatedDateTimeUTC": "637387878524969984"}, ) 
client.ft().client.hset( "doc2", - mapping={ - 'PrimaryKey': '9::362329', - 'CreatedDateTimeUTC': '637387875859270016' - } + mapping={"PrimaryKey": "9::362329", "CreatedDateTimeUTC": "637387875859270016"}, ) - req = aggregations.AggregateRequest("*") \ - .apply(CreatedDateTimeUTC='@CreatedDateTimeUTC * 10') + req = aggregations.AggregateRequest("*").apply( + CreatedDateTimeUTC = "@CreatedDateTimeUTC * 10" + ) res = client.ft().aggregate(req) - assert res.rows[0] == ['CreatedDateTimeUTC', '6373878785249699840'] - assert res.rows[1] == ['CreatedDateTimeUTC', '6373878758592700416'] + assert res.rows[0] == ["CreatedDateTimeUTC", "6373878785249699840"] + assert res.rows[1] == ["CreatedDateTimeUTC", "6373878758592700416"] @pytest.mark.redismod @@ -1131,33 +1095,19 @@ def test_aggregations_filter(client): ) ) - client.ft().client.hset( - "doc1", - mapping={ - 'name': 'bar', - 'age': '25' - } - ) - client.ft().client.hset( - "doc2", - mapping={ - 'name': 'foo', - 'age': '19' - } - ) + client.ft().client.hset("doc1", mapping={"name": "bar", "age": "25"}) + client.ft().client.hset("doc2", mapping={"name": "foo", "age": "19"}) - req = aggregations.AggregateRequest("*") \ - .filter("@name=='foo' && @age < 20") + req = aggregations.AggregateRequest("*").filter("@name=='foo' && @age < 20") res = client.ft().aggregate(req) assert len(res.rows) == 1 - assert res.rows[0] == ['name', 'foo', 'age', '19'] + assert res.rows[0] == ["name", "foo", "age", "19"] - req = aggregations.AggregateRequest("*") \ - .filter("@age > 15").sort_by("@age") + req = aggregations.AggregateRequest("*").filter("@age > 15").sort_by("@age") res = client.ft().aggregate(req) assert len(res.rows) == 2 - assert res.rows[0] == ['age', '19'] - assert res.rows[1] == ['age', '25'] + assert res.rows[0] == ["age", "19"] + assert res.rows[1] == ["age", "25"] @pytest.mark.redismod @@ -1181,25 +1131,25 @@ def test_index_definition(client): ) assert [ - "ON", - "JSON", - "PREFIX", - 2, - "hset:", - "henry", - "FILTER", - "@f1==32", - "LANGUAGE_FIELD", - "play", - "LANGUAGE", - "English", - "SCORE_FIELD", - "chapter", - "SCORE", - 0.5, - "PAYLOAD_FIELD", - "txt", - ] == definition.args + "ON", + "JSON", + "PREFIX", + 2, + "hset:", + "henry", + "FILTER", + "@f1==32", + "LANGUAGE_FIELD", + "play", + "LANGUAGE", + "English", + "SCORE_FIELD", + "chapter", + "SCORE", + 0.5, + "PAYLOAD_FIELD", + "txt", + ] == definition.args createIndex(client.ft(), num_docs=500, definition=definition) From 8d807ef959ca43ba7e2863eabe2b8d6084423cba Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Wed, 1 Dec 2021 19:46:24 +0100 Subject: [PATCH 20/21] linters --- tests/test_search.py | 184 ++++++++++++++++--------------------------- 1 file changed, 67 insertions(+), 117 deletions(-) diff --git a/tests/test_search.py b/tests/test_search.py index 9402677462..d746b3c868 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -61,9 +61,7 @@ def getClient(): def createIndex(client, num_docs=100, definition=None): try: client.create_index( - (TextField("play", weight=5.0), - TextField("txt"), - NumericField("chapter")), + (TextField("play", weight=5.0), TextField("txt"), NumericField("chapter")), definition=definition, ) except redis.ResponseError: @@ -286,17 +284,8 @@ def test_stopwords(client): @pytest.mark.redismod def test_filters(client): - client.ft().create_index( - (TextField("txt"), - NumericField("num"), - GeoField("loc")) - ) - client.ft().add_document( - "doc1", - txt="foo bar", - num=3.141, - loc="-0.441,51.458" - ) + 
client.ft().create_index((TextField("txt"), NumericField("num"), GeoField("loc"))) + client.ft().add_document("doc1", txt="foo bar", num=3.141, loc="-0.441,51.458") client.ft().add_document("doc2", txt="foo baz", num=2, loc="-0.1,51.2") waitForIndex(client, "idx") @@ -342,10 +331,7 @@ def test_payloads_with_no_content(client): @pytest.mark.redismod def test_sort_by(client): - client.ft().create_index( - (TextField("txt"), - NumericField("num", sortable=True)) - ) + client.ft().create_index((TextField("txt"), NumericField("num", sortable=True))) client.ft().add_document("doc1", txt="foo bar", num=1) client.ft().add_document("doc2", txt="foo baz", num=2) client.ft().add_document("doc3", txt="foo qux", num=3) @@ -387,10 +373,7 @@ def test_drop_index(): @pytest.mark.redismod def test_example(client): # Creating the index definition and schema - client.ft().create_index( - (TextField("title", weight=5.0), - TextField("body")) - ) + client.ft().create_index((TextField("title", weight=5.0), TextField("body"))) # Indexing a document client.ft().add_document( @@ -510,11 +493,7 @@ def test_no_index(client): @pytest.mark.redismod def test_partial(client): - client.ft().create_index( - (TextField("f1"), - TextField("f2"), - TextField("f3")) - ) + client.ft().create_index((TextField("f1"), TextField("f2"), TextField("f3"))) client.ft().add_document("doc1", f1="f1_val", f2="f2_val") client.ft().add_document("doc2", f1="f1_val", f2="f2_val") client.ft().add_document("doc1", f3="f3_val", partial=True) @@ -532,11 +511,7 @@ def test_partial(client): @pytest.mark.redismod def test_no_create(client): - client.ft().create_index( - (TextField("f1"), - TextField("f2"), - TextField("f3")) - ) + client.ft().create_index((TextField("f1"), TextField("f2"), TextField("f3"))) client.ft().add_document("doc1", f1="f1_val", f2="f2_val") client.ft().add_document("doc2", f1="f1_val", f2="f2_val") client.ft().add_document("doc1", f3="f3_val", no_create=True) @@ -557,11 +532,7 @@ def test_no_create(client): @pytest.mark.redismod def test_explain(client): - client.ft().create_index( - (TextField("f1"), - TextField("f2"), - TextField("f3")) - ) + client.ft().create_index((TextField("f1"), TextField("f2"), TextField("f3"))) res = client.ft().explain("@f3:f3_val @f2:f2_val @f1:f1_val") assert res @@ -584,8 +555,8 @@ def test_summarize(client): doc = sorted(client.ft().search(q).docs)[0] assert "Henry IV" == doc.play assert ( - "ACT I SCENE I. London. The palace. Enter KING HENRY, LORD JOHN OF LANCASTER, the EARL of WESTMORELAND, SIR... " # noqa - == doc.txt + "ACT I SCENE I. London. The palace. Enter KING HENRY, LORD JOHN OF LANCASTER, the EARL of WESTMORELAND, SIR... " # noqa + == doc.txt ) q = Query("king henry").paging(0, 1).summarize().highlight() @@ -593,8 +564,8 @@ def test_summarize(client): doc = sorted(client.ft().search(q).docs)[0] assert "Henry ... " == doc.play assert ( - "ACT I SCENE I. London. The palace. Enter KING HENRY, LORD JOHN OF LANCASTER, the EARL of WESTMORELAND, SIR... " # noqa - == doc.txt + "ACT I SCENE I. London. The palace. Enter KING HENRY, LORD JOHN OF LANCASTER, the EARL of WESTMORELAND, SIR... 
" # noqa + == doc.txt ) @@ -763,10 +734,10 @@ def test_spell_check(client): res = client.ft().spellcheck("lorm", include="dict") assert len(res["lorm"]) == 3 assert ( - res["lorm"][0]["suggestion"], - res["lorm"][1]["suggestion"], - res["lorm"][2]["suggestion"], - ) == ("lorem", "lore", "lorm") + res["lorm"][0]["suggestion"], + res["lorm"][1]["suggestion"], + res["lorm"][2]["suggestion"], + ) == ("lorem", "lore", "lorm") assert (res["lorm"][0]["score"], res["lorm"][1]["score"]) == ("0.5", "0") # test spellcheck exclude @@ -1010,7 +981,7 @@ def test_aggregations_groupby(client): ) res = client.ft().aggregate(req).rows[0] - assert res == ['parent', 'redis', 'first', 'RediSearch'] + assert res == ["parent", "redis", "first", "RediSearch"] req = aggregations.AggregateRequest("redis").group_by( "@parent", @@ -1033,35 +1004,33 @@ def test_aggregations_sort_by_and_limit(client): ) ) - client.ft().client.hset("doc1", mapping={'t1': 'a', 't2': 'b'}) - client.ft().client.hset("doc2", mapping={'t1': 'b', 't2': 'a'}) + client.ft().client.hset("doc1", mapping={"t1": "a", "t2": "b"}) + client.ft().client.hset("doc2", mapping={"t1": "b", "t2": "a"}) # test sort_by using SortDirection - req = aggregations.AggregateRequest("*") \ - .sort_by(aggregations.Asc("@t2"), aggregations.Desc("@t1")) + req = aggregations.AggregateRequest("*").sort_by( + aggregations.Asc("@t2"), aggregations.Desc("@t1") + ) res = client.ft().aggregate(req) - assert res.rows[0] == ['t2', 'a', 't1', 'b'] - assert res.rows[1] == ['t2', 'b', 't1', 'a'] + assert res.rows[0] == ["t2", "a", "t1", "b"] + assert res.rows[1] == ["t2", "b", "t1", "a"] # test sort_by without SortDirection - req = aggregations.AggregateRequest("*") \ - .sort_by("@t1") + req = aggregations.AggregateRequest("*").sort_by("@t1") res = client.ft().aggregate(req) - assert res.rows[0] == ['t1', 'a'] - assert res.rows[1] == ['t1', 'b'] + assert res.rows[0] == ["t1", "a"] + assert res.rows[1] == ["t1", "b"] # test sort_by with max - req = aggregations.AggregateRequest("*") \ - .sort_by("@t1", max=1) + req = aggregations.AggregateRequest("*").sort_by("@t1", max=1) res = client.ft().aggregate(req) assert len(res.rows) == 1 # test limit - req = aggregations.AggregateRequest("*") \ - .sort_by("@t1").limit(1, 1) + req = aggregations.AggregateRequest("*").sort_by("@t1").limit(1, 1) res = client.ft().aggregate(req) assert len(res.rows) == 1 - assert res.rows[0] == ['t1', 'b'] + assert res.rows[0] == ["t1", "b"] @pytest.mark.redismod @@ -1073,22 +1042,22 @@ def test_aggregations_load(client): ) ) - client.ft().client.hset("doc1", mapping={'t1': 'hello', 't2': 'world'}) + client.ft().client.hset("doc1", mapping={"t1": "hello", "t2": "world"}) # load t1 req = aggregations.AggregateRequest("*").load("t1") res = client.ft().aggregate(req) - assert res.rows[0] == ['t1', 'hello'] + assert res.rows[0] == ["t1", "hello"] # load t2 req = aggregations.AggregateRequest("*").load("t2") res = client.ft().aggregate(req) - assert res.rows[0] == ['t2', 'world'] + assert res.rows[0] == ["t2", "world"] # load all req = aggregations.AggregateRequest("*").load() res = client.ft().aggregate(req) - assert res.rows[0] == ['t1', 'hello', 't2', 'world'] + assert res.rows[0] == ["t1", "hello", "t2", "world"] @pytest.mark.redismod @@ -1102,24 +1071,19 @@ def test_aggregations_apply(client): client.ft().client.hset( "doc1", - mapping={ - 'PrimaryKey': '9::362330', - 'CreatedDateTimeUTC': '637387878524969984' - } + mapping={"PrimaryKey": "9::362330", "CreatedDateTimeUTC": "637387878524969984"}, ) 
client.ft().client.hset( "doc2", - mapping={ - 'PrimaryKey': '9::362329', - 'CreatedDateTimeUTC': '637387875859270016' - } + mapping={"PrimaryKey": "9::362329", "CreatedDateTimeUTC": "637387875859270016"}, ) - req = aggregations.AggregateRequest("*") \ - .apply(CreatedDateTimeUTC='@CreatedDateTimeUTC * 10') + req = aggregations.AggregateRequest("*").apply( + CreatedDateTimeUTC="@CreatedDateTimeUTC * 10" + ) res = client.ft().aggregate(req) - assert res.rows[0] == ['CreatedDateTimeUTC', '6373878785249699840'] - assert res.rows[1] == ['CreatedDateTimeUTC', '6373878758592700416'] + assert res.rows[0] == ["CreatedDateTimeUTC", "6373878785249699840"] + assert res.rows[1] == ["CreatedDateTimeUTC", "6373878758592700416"] @pytest.mark.redismod @@ -1131,33 +1095,19 @@ def test_aggregations_filter(client): ) ) - client.ft().client.hset( - "doc1", - mapping={ - 'name': 'bar', - 'age': '25' - } - ) - client.ft().client.hset( - "doc2", - mapping={ - 'name': 'foo', - 'age': '19' - } - ) + client.ft().client.hset("doc1", mapping={"name": "bar", "age": "25"}) + client.ft().client.hset("doc2", mapping={"name": "foo", "age": "19"}) - req = aggregations.AggregateRequest("*") \ - .filter("@name=='foo' && @age < 20") + req = aggregations.AggregateRequest("*").filter("@name=='foo' && @age < 20") res = client.ft().aggregate(req) assert len(res.rows) == 1 - assert res.rows[0] == ['name', 'foo', 'age', '19'] + assert res.rows[0] == ["name", "foo", "age", "19"] - req = aggregations.AggregateRequest("*") \ - .filter("@age > 15").sort_by("@age") + req = aggregations.AggregateRequest("*").filter("@age > 15").sort_by("@age") res = client.ft().aggregate(req) assert len(res.rows) == 2 - assert res.rows[0] == ['age', '19'] - assert res.rows[1] == ['age', '25'] + assert res.rows[0] == ["age", "19"] + assert res.rows[1] == ["age", "25"] @pytest.mark.redismod @@ -1181,25 +1131,25 @@ def test_index_definition(client): ) assert [ - "ON", - "JSON", - "PREFIX", - 2, - "hset:", - "henry", - "FILTER", - "@f1==32", - "LANGUAGE_FIELD", - "play", - "LANGUAGE", - "English", - "SCORE_FIELD", - "chapter", - "SCORE", - 0.5, - "PAYLOAD_FIELD", - "txt", - ] == definition.args + "ON", + "JSON", + "PREFIX", + 2, + "hset:", + "henry", + "FILTER", + "@f1==32", + "LANGUAGE_FIELD", + "play", + "LANGUAGE", + "English", + "SCORE_FIELD", + "chapter", + "SCORE", + 0.5, + "PAYLOAD_FIELD", + "txt", + ] == definition.args createIndex(client.ft(), num_docs=500, definition=definition) From b2d5c8dc3d7a27e7886e9d54bad5a25f4c7e402f Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Wed, 1 Dec 2021 20:10:26 +0100 Subject: [PATCH 21/21] linters --- tests/test_search.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_search.py b/tests/test_search.py index d746b3c868..1a22b665a8 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -795,7 +795,7 @@ def test_scorer(client): ) client.ft().add_document( "doc2", - description="Quick alice was beginning to get very tired of sitting by her quick sister on the bank, and of having nothing to do.", # noqa + description="Quick alice was beginning to get very tired of sitting by her quick sister on the bank, and of having nothing to do.", # noqa ) # default scorer is TFIDF
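
Taken together, the series above exercises LOAD / LOAD *, SORTBY (with Asc/Desc and max), LIMIT, FILTER, APPLY and the split GROUPBY reducer tests for FT.AGGREGATE. The snippet below is a minimal usage sketch of those features, not part of the patches themselves: it assumes a local Redis server with the RediSearch module loaded, and the index, key and field names are illustrative only.

# Illustrative sketch only (not part of the patches above). Assumes a Redis
# server with RediSearch loaded on localhost:6379; names are made up.
import redis
import redis.commands.search.aggregation as aggregations
import redis.commands.search.reducers as reducers
from redis.commands.search.field import NumericField, TextField

client = redis.Redis(host="localhost", port=6379, decode_responses=True)

# Index two hash documents with a sortable text field and a sortable numeric field.
client.ft().create_index(
    (
        TextField("name", sortable=True),
        NumericField("age", sortable=True),
    )
)
client.hset("doc1", mapping={"name": "bar", "age": "25"})
client.hset("doc2", mapping={"name": "foo", "age": "19"})

# LOAD: calling load() with no arguments now emits `LOAD *`, returning every field.
req = aggregations.AggregateRequest("*").load()
print(client.ft().aggregate(req).rows)

# SORTBY with the Asc/Desc helpers, plus LIMIT for pagination.
req = (
    aggregations.AggregateRequest("*")
    .sort_by(aggregations.Asc("@age"), aggregations.Desc("@name"))
    .limit(0, 1)
)
print(client.ft().aggregate(req).rows)

# FILTER rows with a boolean expression, then sort; max caps the result count.
req = aggregations.AggregateRequest("*").filter("@age > 15").sort_by("@age", max=2)
print(client.ft().aggregate(req).rows)

# APPLY a computed expression to each row.
req = aggregations.AggregateRequest("*").apply(age_x2="@age * 2")
print(client.ft().aggregate(req).rows)

# GROUPBY with a reducer.
req = aggregations.AggregateRequest("*").group_by("@name", reducers.count().alias("cnt"))
print(client.ft().aggregate(req).rows)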