From 765c99668bd9832a9dd523405bc54444b980c475 Mon Sep 17 00:00:00 2001 From: Kishore Nallan Date: Sun, 5 Mar 2023 15:45:40 +0530 Subject: [PATCH 001/288] Fix import regression + bump --- setup.py | 2 +- typesense/documents.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 43356cb..728c3cf 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ setup( name='typesense', python_requires='>=3', - version='0.15.0', + version='0.15.1', packages=['examples', 'typesense'], install_requires=['requests'], url='https://typesense.org', diff --git a/typesense/documents.py b/typesense/documents.py index b0ac22d..31c7c94 100644 --- a/typesense/documents.py +++ b/typesense/documents.py @@ -55,7 +55,7 @@ def import_jsonl(self, documents_jsonl): # `documents` can be either a list of document objects (or) # JSONL-formatted string containing multiple documents def import_(self, documents, params=None, batch_size=None): - if isinstance(documents, Iterable): + if isinstance(documents, Iterable) and not isinstance(documents, (str, bytes)): if batch_size: response_objs = [] batch = [] From bb2a15c88cc79308053dc8dc56c29de17c8ab6cb Mon Sep 17 00:00:00 2001 From: Kishore Nallan Date: Mon, 17 Jul 2023 14:56:27 +0530 Subject: [PATCH 002/288] Add analytics rules resource. 
--- examples/analytics_operations.py | 58 ++++++++++++++++++++++++++++++++ typesense/analytics_rule.py | 14 ++++++++ typesense/analytics_rules.py | 26 ++++++++++++++ typesense/client.py | 3 +- 4 files changed, 100 insertions(+), 1 deletion(-) create mode 100644 examples/analytics_operations.py create mode 100644 typesense/analytics_rule.py create mode 100644 typesense/analytics_rules.py diff --git a/examples/analytics_operations.py b/examples/analytics_operations.py new file mode 100644 index 0000000..23f5f58 --- /dev/null +++ b/examples/analytics_operations.py @@ -0,0 +1,58 @@ +import typesense + +client = typesense.Client({ + 'api_key': 'abcd', + 'nodes': [{ + 'host': 'localhost', + 'port': '8108', + 'protocol': 'http' + }], + 'connection_timeout_seconds': 2 +}) + +# Drop pre-existing rule if any +try: + client.analytics_rules['top_queries'].delete() +except Exception as e: + pass + +# Create a new rule +create_response = client.analytics_rules.create({ + "name": "top_queries", + "type": "popular_queries", + "params": { + "source": { + "collections": ["products"] + }, + "destination": { + "collection": "top_queries" + }, + "limit": 1000 + } +}) +print(create_response) + +# Try to fetch it back +print(client.analytics_rules['top_queries'].retrieve()) + +# Update the rule +update_response = client.analytics_rules.upsert('top_queries', { + "name": "top_queries", + "type": "popular_queries", + "params": { + "source": { + "collections": ["products"] + }, + "destination": { + "collection": "top_queries" + }, + "limit": 100 + } +}) +print(update_response) + +# List all rules +print(client.analytics_rules.retrieve()) + +# Delete the rule +print(client.analytics_rules['top_queries'].delete()) diff --git a/typesense/analytics_rule.py b/typesense/analytics_rule.py new file mode 100644 index 0000000..1b7576a --- /dev/null +++ b/typesense/analytics_rule.py @@ -0,0 +1,14 @@ +class AnalyticsRule(object): + def __init__(self, api_call, rule_id): + self.api_call = api_call + 
self.rule_id = rule_id + + def _endpoint_path(self): + from .analytics_rules import AnalyticsRules + return u"{0}/{1}".format(AnalyticsRules.RESOURCE_PATH, self.rule_id) + + def retrieve(self): + return self.api_call.get(self._endpoint_path()) + + def delete(self): + return self.api_call.delete(self._endpoint_path()) diff --git a/typesense/analytics_rules.py b/typesense/analytics_rules.py new file mode 100644 index 0000000..60a747d --- /dev/null +++ b/typesense/analytics_rules.py @@ -0,0 +1,26 @@ +from .analytics_rule import AnalyticsRule + + +class AnalyticsRules(object): + RESOURCE_PATH = '/analytics/rules' + + def __init__(self, api_call): + self.api_call = api_call + self.rules = {} + + def __getitem__(self, rule_id): + if rule_id not in self.rules: + self.rules[rule_id] = AnalyticsRule(self.api_call, rule_id) + + return self.rules[rule_id] + + def create(self, rule, params=None): + params = params or {} + return self.api_call.post(AnalyticsRules.RESOURCE_PATH, rule, params) + + def upsert(self, id, rule): + return self.api_call.put(u"{0}/{1}".format(AnalyticsRules.RESOURCE_PATH, id), rule) + + def retrieve(self): + return self.api_call.get(AnalyticsRules.RESOURCE_PATH) + diff --git a/typesense/client.py b/typesense/client.py index aa9563d..3db25a6 100644 --- a/typesense/client.py +++ b/typesense/client.py @@ -6,7 +6,7 @@ from .operations import Operations from .configuration import Configuration from .api_call import ApiCall - +from .analytics_rules import AnalyticsRules class Client(object): def __init__(self, config_dict): @@ -16,5 +16,6 @@ def __init__(self, config_dict): self.multi_search = MultiSearch(self.api_call) self.keys = Keys(self.api_call) self.aliases = Aliases(self.api_call) + self.analytics_rules = AnalyticsRules(self.api_call) self.operations = Operations(self.api_call) self.debug = Debug(self.api_call) From aeb5313d4c1372c58fa8d950cb9190f11e6c3006 Mon Sep 17 00:00:00 2001 From: Kishore Nallan Date: Sat, 5 Aug 2023 18:13:24 +0530 Subject: 
[PATCH 003/288] Return helpful error when an empty list of docs are imported. --- examples/collection_operations.py | 9 ++++++++- typesense/documents.py | 3 +++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/examples/collection_operations.py b/examples/collection_operations.py index 299438d..e1bba69 100644 --- a/examples/collection_operations.py +++ b/examples/collection_operations.py @@ -2,7 +2,7 @@ import os import sys import typesense - +from typesense.exceptions import TypesenseClientError curr_dir = os.path.dirname(os.path.realpath(__file__)) sys.path.insert(1, os.path.abspath(os.path.join(curr_dir, os.pardir))) @@ -131,6 +131,13 @@ # Deleting documents matching a filter query print(client.collections['books'].documents.delete({'filter_by': 'ratings_count: 4780653'})) +# Try importing empy list +try: + import_results = client.collections['books'].documents.import_([], {"action": "upsert"}) + print(import_results) +except TypesenseClientError as e: + print("Detected import of empty document list.") + # Drop the collection drop_response = client.collections['books'].delete() print(drop_response) diff --git a/typesense/documents.py b/typesense/documents.py index 31c7c94..2e3e201 100644 --- a/typesense/documents.py +++ b/typesense/documents.py @@ -74,6 +74,9 @@ def import_(self, documents, params=None, batch_size=None): for document in documents: document_strs.append(json.dumps(document)) + if len(document_strs) == 0: + raise TypesenseClientError(f"Cannot import an empty list of documents.") + docs_import = '\n'.join(document_strs) api_response = self.api_call.post(self._endpoint_path('import'), docs_import, params, as_json=False) res_obj_strs = api_response.split('\n') From 66ec088c98b3caaf7da5f5c355a83464235bbe26 Mon Sep 17 00:00:00 2001 From: Kishore Nallan Date: Sat, 5 Aug 2023 18:42:12 +0530 Subject: [PATCH 004/288] Support python True/False as values for POST params. 
--- examples/collection_operations.py | 1 + typesense/api_call.py | 9 +++++++++ 2 files changed, 10 insertions(+) diff --git a/examples/collection_operations.py b/examples/collection_operations.py index e1bba69..60c1353 100644 --- a/examples/collection_operations.py +++ b/examples/collection_operations.py @@ -117,6 +117,7 @@ # Upserting documents import_results = client.collections['books'].documents.import_(docs_to_import, { 'action': 'upsert', + 'return_id': True }) print(import_results) diff --git a/typesense/api_call.py b/typesense/api_call.py index c9d8062..d3fe87b 100644 --- a/typesense/api_call.py +++ b/typesense/api_call.py @@ -133,6 +133,14 @@ def set_node_healthcheck(self, node, is_healthy): node.healthy = is_healthy node.last_access_ts = int(time.time()) + @staticmethod + def normalize_params(params): + for key in params.keys(): + if params[key] == True: + params[key] = 'true' + elif params[key] == False: + params[key] = 'false' + def get(self, endpoint, params=None, as_json=True): params = params or {} return self.make_request(requests.get, endpoint, as_json, @@ -141,6 +149,7 @@ def get(self, endpoint, params=None, as_json=True): def post(self, endpoint, body, params=None, as_json=True): params = params or {} + ApiCall.normalize_params(params) return self.make_request(requests.post, endpoint, as_json, params=params, data=body, timeout=self.config.connection_timeout_seconds) From 8e15e5d2eb0b25c33b5296089a15105f2e2ccf2e Mon Sep 17 00:00:00 2001 From: Kishore Nallan Date: Fri, 11 Aug 2023 12:30:34 +0530 Subject: [PATCH 005/288] Move analytics rules to separate resource. 
--- examples/analytics_operations.py | 12 ++++++------ typesense/analytics.py | 6 ++++++ typesense/client.py | 4 ++-- 3 files changed, 14 insertions(+), 8 deletions(-) create mode 100644 typesense/analytics.py diff --git a/examples/analytics_operations.py b/examples/analytics_operations.py index 23f5f58..c625c99 100644 --- a/examples/analytics_operations.py +++ b/examples/analytics_operations.py @@ -12,12 +12,12 @@ # Drop pre-existing rule if any try: - client.analytics_rules['top_queries'].delete() + client.analytics.rules['top_queries'].delete() except Exception as e: pass # Create a new rule -create_response = client.analytics_rules.create({ +create_response = client.analytics.rules.create({ "name": "top_queries", "type": "popular_queries", "params": { @@ -33,10 +33,10 @@ print(create_response) # Try to fetch it back -print(client.analytics_rules['top_queries'].retrieve()) +print(client.analytics.rules['top_queries'].retrieve()) # Update the rule -update_response = client.analytics_rules.upsert('top_queries', { +update_response = client.analytics.rules.upsert('top_queries', { "name": "top_queries", "type": "popular_queries", "params": { @@ -52,7 +52,7 @@ print(update_response) # List all rules -print(client.analytics_rules.retrieve()) +print(client.analytics.rules.retrieve()) # Delete the rule -print(client.analytics_rules['top_queries'].delete()) +print(client.analytics.rules['top_queries'].delete()) diff --git a/typesense/analytics.py b/typesense/analytics.py new file mode 100644 index 0000000..bec4cbf --- /dev/null +++ b/typesense/analytics.py @@ -0,0 +1,6 @@ +from .analytics_rules import AnalyticsRules + + +class Analytics(object): + def __init__(self, api_call): + self.rules = AnalyticsRules(api_call) diff --git a/typesense/client.py b/typesense/client.py index 3db25a6..d56df0e 100644 --- a/typesense/client.py +++ b/typesense/client.py @@ -6,7 +6,7 @@ from .operations import Operations from .configuration import Configuration from .api_call import ApiCall 
-from .analytics_rules import AnalyticsRules +from .analytics import Analytics class Client(object): def __init__(self, config_dict): @@ -16,6 +16,6 @@ def __init__(self, config_dict): self.multi_search = MultiSearch(self.api_call) self.keys = Keys(self.api_call) self.aliases = Aliases(self.api_call) - self.analytics_rules = AnalyticsRules(self.api_call) + self.analytics = Analytics(self.api_call) self.operations = Operations(self.api_call) self.debug = Debug(self.api_call) From 2996a759cf6adc933350f786844a01a03a1201d4 Mon Sep 17 00:00:00 2001 From: Kishore Nallan Date: Fri, 11 Aug 2023 12:37:00 +0530 Subject: [PATCH 006/288] Bump version. --- README.md | 17 +++++++++-------- setup.py | 2 +- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index fe6ee5c..298fa28 100644 --- a/README.md +++ b/README.md @@ -19,14 +19,15 @@ See detailed [API documentation](https://typesense.org/api). ## Compatibility | Typesense Server | typesense-python | -|------------------|----------------| -| \>= v0.23.0 | \>= v0.14.0 | -| \>= v0.21.0 | \>= v0.13.0 | -| \>= v0.20.0 | \>= v0.11.0 | -| \>= v0.19.0 | \>= v0.10.0 | -| \>= v0.17.0 | \>= v0.9.0 | -| \>= v0.16.0 | \>= v0.8.0 | -| \>= v0.15.0 | \>= v0.7.0 | +|------------------|------------------| +| \>= v0.25.0 | \>= v0.16.0 | +| \>= v0.23.0 | \>= v0.14.0 | +| \>= v0.21.0 | \>= v0.13.0 | +| \>= v0.20.0 | \>= v0.11.0 | +| \>= v0.19.0 | \>= v0.10.0 | +| \>= v0.17.0 | \>= v0.9.0 | +| \>= v0.16.0 | \>= v0.8.0 | +| \>= v0.15.0 | \>= v0.7.0 | ## Contributing diff --git a/setup.py b/setup.py index 728c3cf..03b2e63 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ setup( name='typesense', python_requires='>=3', - version='0.15.1', + version='0.16.0', packages=['examples', 'typesense'], install_requires=['requests'], url='https://typesense.org', From e0070f4026f098b0e772f758880d9226132941a3 Mon Sep 17 00:00:00 2001 From: 
Grant Jenks Date: Thu, 17 Aug 2023 15:31:28 -0700 Subject: [PATCH 007/288] Fix is_health.py example file name typo --- examples/{is_healty.py => is_healthy.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename examples/{is_healty.py => is_healthy.py} (100%) diff --git a/examples/is_healty.py b/examples/is_healthy.py similarity index 100% rename from examples/is_healty.py rename to examples/is_healthy.py From 7af90f93350c7dad5629d512f88b0d2aec6622ff Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Thu, 17 Aug 2023 15:36:27 -0700 Subject: [PATCH 008/288] Publish sdist along with wheel --- publish.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/publish.sh b/publish.sh index 8ef3ac3..f20e262 100755 --- a/publish.sh +++ b/publish.sh @@ -1,4 +1,5 @@ #!/usr/bin/env bash rm -rf dist/* +python3 setup.py sdist python3 setup.py bdist_wheel --universal twine upload dist/* From a0aa70d3217ddd3e83bbbccc05d977cc4665b229 Mon Sep 17 00:00:00 2001 From: Kishore Nallan Date: Sat, 19 Aug 2023 17:27:33 +0530 Subject: [PATCH 009/288] Fix update documents -- should be calling PATCH not POST. 
--- examples/collection_operations.py | 6 +++++- typesense/documents.py | 2 +- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/examples/collection_operations.py b/examples/collection_operations.py index 60c1353..62e012e 100644 --- a/examples/collection_operations.py +++ b/examples/collection_operations.py @@ -53,7 +53,7 @@ # Add a book hunger_games_book = { - 'id': '1', 'original_publication_year': 2008, 'authors': ['Suzanne Collins'], 'average_rating': 4.34, + 'id': '1', 'authors': ['Suzanne Collins'], 'average_rating': 4.34, 'publication_year': 2008, 'title': 'The Hunger Games', 'image_url': 'https://images.gr-assets.com/books/1447303603m/2767052.jpg', 'ratings_count': 4780653 @@ -125,6 +125,10 @@ schema_change = {"fields": [{"name": "in_stock", "optional": True, "type": "bool"}]} print(client.collections['books'].update(schema_change)) +# Update value matching a filter +updated_doc = {'publication_year': 2009} +print(client.collections['books'].documents.update(updated_doc, {'filter_by': 'publication_year: 2008'})) + # Drop the field schema_change = {"fields": [{"name": "in_stock", "drop": True}]} print(client.collections['books'].update(schema_change)) diff --git a/typesense/documents.py b/typesense/documents.py index 2e3e201..13dacf8 100644 --- a/typesense/documents.py +++ b/typesense/documents.py @@ -46,7 +46,7 @@ def upsert(self, document, params=None): def update(self, document, params=None): params = params or {} params['action'] = 'update' - return self.api_call.post(self._endpoint_path(), document, params) + return self.api_call.patch(self._endpoint_path(), document, params) def import_jsonl(self, documents_jsonl): logger.warning('`import_jsonl` is deprecated: please use `import_`.') From 1653a232af7bee9d2ee80b435edd4c3c19346dc8 Mon Sep 17 00:00:00 2001 From: Pierre Engles Date: Tue, 12 Sep 2023 12:26:55 +0200 Subject: [PATCH 010/288] Use requests session for reuse http conections (increase 
performance) https://stackoverflow.com/a/34491383 --- typesense/api_call.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/typesense/api_call.py b/typesense/api_call.py index d3fe87b..7d14766 100644 --- a/typesense/api_call.py +++ b/typesense/api_call.py @@ -8,7 +8,7 @@ RequestMalformed, RequestUnauthorized, RequestForbidden, ServerError, ServiceUnavailable, TypesenseClientError) from .logger import logger - +session = requests.session() class ApiCall(object): API_KEY_HEADER_NAME = 'X-TYPESENSE-API-KEY' @@ -143,27 +143,27 @@ def normalize_params(params): def get(self, endpoint, params=None, as_json=True): params = params or {} - return self.make_request(requests.get, endpoint, as_json, + return self.make_request(session.get, endpoint, as_json, params=params, timeout=self.config.connection_timeout_seconds) def post(self, endpoint, body, params=None, as_json=True): params = params or {} ApiCall.normalize_params(params) - return self.make_request(requests.post, endpoint, as_json, + return self.make_request(session.post, endpoint, as_json, params=params, data=body, timeout=self.config.connection_timeout_seconds) def put(self, endpoint, body, params=None): - return self.make_request(requests.put, endpoint, True, + return self.make_request(session.put, endpoint, True, params=params, data=body, timeout=self.config.connection_timeout_seconds) def patch(self, endpoint, body, params=None): - return self.make_request(requests.patch, endpoint, True, + return self.make_request(session.patch, endpoint, True, params=params, data=body, timeout=self.config.connection_timeout_seconds) def delete(self, endpoint, params=None): - return self.make_request(requests.delete, endpoint, True, + return self.make_request(session.delete, endpoint, True, params=params, timeout=self.config.connection_timeout_seconds) From e22e31df314371b41cc57527c4583f3451a2f63d Mon Sep 17 00:00:00 2001 From: Kishore Nallan Date: Wed, 13 
Sep 2023 06:36:37 +0530 Subject: [PATCH 011/288] Bump version. --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 03b2e63..8976a56 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ setup( name='typesense', python_requires='>=3', - version='0.16.0', + version='0.17.0', packages=['examples', 'typesense'], install_requires=['requests'], url='https://typesense.org', From 4504f121d400d3420e3460f4c0dc4b4fd82c377c Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 18 Sep 2023 15:48:50 -0700 Subject: [PATCH 012/288] Try moving from setup.py to pyproject.toml --- pyproject.toml | 34 +++++++++++++++++++ setup.py | 19 ----------- {typesense => src/typesense}/__init__.py | 3 ++ {typesense => src/typesense}/alias.py | 0 {typesense => src/typesense}/aliases.py | 0 {typesense => src/typesense}/analytics.py | 0 .../typesense}/analytics_rule.py | 0 .../typesense}/analytics_rules.py | 0 {typesense => src/typesense}/api_call.py | 0 {typesense => src/typesense}/client.py | 0 {typesense => src/typesense}/collection.py | 0 {typesense => src/typesense}/collections.py | 0 {typesense => src/typesense}/configuration.py | 0 {typesense => src/typesense}/debug.py | 0 {typesense => src/typesense}/document.py | 0 {typesense => src/typesense}/documents.py | 0 {typesense => src/typesense}/exceptions.py | 0 {typesense => src/typesense}/key.py | 0 {typesense => src/typesense}/keys.py | 0 {typesense => src/typesense}/logger.py | 0 {typesense => src/typesense}/multi_search.py | 0 {typesense => src/typesense}/operations.py | 0 {typesense => src/typesense}/override.py | 0 {typesense => src/typesense}/overrides.py | 0 {typesense => src/typesense}/preprocess.py | 0 {typesense => src/typesense}/synonym.py | 0 {typesense => src/typesense}/synonyms.py | 0 {typesense => src/typesense}/validation.py | 0 28 files changed, 37 insertions(+), 19 deletions(-) create mode 100644 pyproject.toml delete mode 100644 
setup.py rename {typesense => src/typesense}/__init__.py (58%) rename {typesense => src/typesense}/alias.py (100%) rename {typesense => src/typesense}/aliases.py (100%) rename {typesense => src/typesense}/analytics.py (100%) rename {typesense => src/typesense}/analytics_rule.py (100%) rename {typesense => src/typesense}/analytics_rules.py (100%) rename {typesense => src/typesense}/api_call.py (100%) rename {typesense => src/typesense}/client.py (100%) rename {typesense => src/typesense}/collection.py (100%) rename {typesense => src/typesense}/collections.py (100%) rename {typesense => src/typesense}/configuration.py (100%) rename {typesense => src/typesense}/debug.py (100%) rename {typesense => src/typesense}/document.py (100%) rename {typesense => src/typesense}/documents.py (100%) rename {typesense => src/typesense}/exceptions.py (100%) rename {typesense => src/typesense}/key.py (100%) rename {typesense => src/typesense}/keys.py (100%) rename {typesense => src/typesense}/logger.py (100%) rename {typesense => src/typesense}/multi_search.py (100%) rename {typesense => src/typesense}/operations.py (100%) rename {typesense => src/typesense}/override.py (100%) rename {typesense => src/typesense}/overrides.py (100%) rename {typesense => src/typesense}/preprocess.py (100%) rename {typesense => src/typesense}/synonym.py (100%) rename {typesense => src/typesense}/synonyms.py (100%) rename {typesense => src/typesense}/validation.py (100%) diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..031da77 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,34 @@ +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +[project] +name = "typesense" +description = "Python client for Typesense, an open source and typo tolerant search engine." 
+authors = [ + {"name" = "Typesense", "email" = "contact@typesense.org"}, +] +readme = "README.md" +requires-python = ">=3" +keywords = ["search", "typesense"] +license = {"text" = "Apache 2.0"} +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", +] +dependencies = ["requests"] +dynamic = ["version"] + +[project.urls] +Documentation = "https://typesense.org/" +Source = "https://github.com/typesense/typesense-python" +Tracker = "https://github.com/typesense/typesense-python/issues" + +[tool.setuptools.dynamic] +version = {attr = "typesense.__version__"} + +[tool.setuptools.packages.find] +where = ["src"] diff --git a/setup.py b/setup.py deleted file mode 100644 index 8976a56..0000000 --- a/setup.py +++ /dev/null @@ -1,19 +0,0 @@ -from setuptools import setup - -from m2r import parse_from_file - -long_description = parse_from_file('README.md') - -setup( - name='typesense', - python_requires='>=3', - version='0.17.0', - packages=['examples', 'typesense'], - install_requires=['requests'], - url='https://typesense.org', - license='Apache 2.0', - author='Typesense', - author_email='contact@typesense.org', - description='Python client for Typesense, an open source and typo tolerant search engine.', - long_description=long_description, -) diff --git a/typesense/__init__.py b/src/typesense/__init__.py similarity index 58% rename from typesense/__init__.py rename to src/typesense/__init__.py index aa51949..14e64ad 100644 --- a/typesense/__init__.py +++ b/src/typesense/__init__.py @@ -1 +1,4 @@ from .client import Client # NOQA + + +__version__ = '0.17.1' diff --git a/typesense/alias.py b/src/typesense/alias.py similarity 
index 100% rename from typesense/alias.py rename to src/typesense/alias.py diff --git a/typesense/aliases.py b/src/typesense/aliases.py similarity index 100% rename from typesense/aliases.py rename to src/typesense/aliases.py diff --git a/typesense/analytics.py b/src/typesense/analytics.py similarity index 100% rename from typesense/analytics.py rename to src/typesense/analytics.py diff --git a/typesense/analytics_rule.py b/src/typesense/analytics_rule.py similarity index 100% rename from typesense/analytics_rule.py rename to src/typesense/analytics_rule.py diff --git a/typesense/analytics_rules.py b/src/typesense/analytics_rules.py similarity index 100% rename from typesense/analytics_rules.py rename to src/typesense/analytics_rules.py diff --git a/typesense/api_call.py b/src/typesense/api_call.py similarity index 100% rename from typesense/api_call.py rename to src/typesense/api_call.py diff --git a/typesense/client.py b/src/typesense/client.py similarity index 100% rename from typesense/client.py rename to src/typesense/client.py diff --git a/typesense/collection.py b/src/typesense/collection.py similarity index 100% rename from typesense/collection.py rename to src/typesense/collection.py diff --git a/typesense/collections.py b/src/typesense/collections.py similarity index 100% rename from typesense/collections.py rename to src/typesense/collections.py diff --git a/typesense/configuration.py b/src/typesense/configuration.py similarity index 100% rename from typesense/configuration.py rename to src/typesense/configuration.py diff --git a/typesense/debug.py b/src/typesense/debug.py similarity index 100% rename from typesense/debug.py rename to src/typesense/debug.py diff --git a/typesense/document.py b/src/typesense/document.py similarity index 100% rename from typesense/document.py rename to src/typesense/document.py diff --git a/typesense/documents.py b/src/typesense/documents.py similarity index 100% rename from typesense/documents.py rename to 
src/typesense/documents.py diff --git a/typesense/exceptions.py b/src/typesense/exceptions.py similarity index 100% rename from typesense/exceptions.py rename to src/typesense/exceptions.py diff --git a/typesense/key.py b/src/typesense/key.py similarity index 100% rename from typesense/key.py rename to src/typesense/key.py diff --git a/typesense/keys.py b/src/typesense/keys.py similarity index 100% rename from typesense/keys.py rename to src/typesense/keys.py diff --git a/typesense/logger.py b/src/typesense/logger.py similarity index 100% rename from typesense/logger.py rename to src/typesense/logger.py diff --git a/typesense/multi_search.py b/src/typesense/multi_search.py similarity index 100% rename from typesense/multi_search.py rename to src/typesense/multi_search.py diff --git a/typesense/operations.py b/src/typesense/operations.py similarity index 100% rename from typesense/operations.py rename to src/typesense/operations.py diff --git a/typesense/override.py b/src/typesense/override.py similarity index 100% rename from typesense/override.py rename to src/typesense/override.py diff --git a/typesense/overrides.py b/src/typesense/overrides.py similarity index 100% rename from typesense/overrides.py rename to src/typesense/overrides.py diff --git a/typesense/preprocess.py b/src/typesense/preprocess.py similarity index 100% rename from typesense/preprocess.py rename to src/typesense/preprocess.py diff --git a/typesense/synonym.py b/src/typesense/synonym.py similarity index 100% rename from typesense/synonym.py rename to src/typesense/synonym.py diff --git a/typesense/synonyms.py b/src/typesense/synonyms.py similarity index 100% rename from typesense/synonyms.py rename to src/typesense/synonyms.py diff --git a/typesense/validation.py b/src/typesense/validation.py similarity index 100% rename from typesense/validation.py rename to src/typesense/validation.py From ebf66264162e63db82e9574e2af3ef6fff92af00 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 18 Sep 
2023 16:02:53 -0700 Subject: [PATCH 013/288] Use build --- publish.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/publish.sh b/publish.sh index f20e262..979ffdf 100755 --- a/publish.sh +++ b/publish.sh @@ -1,5 +1,4 @@ #!/usr/bin/env bash rm -rf dist/* -python3 setup.py sdist -python3 setup.py bdist_wheel --universal +python3 -m build twine upload dist/* From 56617c5a5dc9ada57884ca79e9b47b52e86e1618 Mon Sep 17 00:00:00 2001 From: Grant Jenks Date: Mon, 18 Sep 2023 16:03:01 -0700 Subject: [PATCH 014/288] Change name to typesense-alt --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 031da77..0d3eeb5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ requires = ["setuptools"] build-backend = "setuptools.build_meta" [project] -name = "typesense" +name = "typesense-alt" description = "Python client for Typesense, an open source and typo tolerant search engine." authors = [ {"name" = "Typesense", "email" = "contact@typesense.org"}, From 3ae1ee39b4ee9234fbfbe9a657bacc1ea3093aa3 Mon Sep 17 00:00:00 2001 From: CodeSandwich Date: Tue, 7 Nov 2023 19:28:09 +0100 Subject: [PATCH 015/288] Add Node creation with a URL string --- typesense/configuration.py | 39 +++++++++++++++++++++++++------------- 1 file changed, 26 insertions(+), 13 deletions(-) diff --git a/typesense/configuration.py b/typesense/configuration.py index 165dbc6..c101a31 100644 --- a/typesense/configuration.py +++ b/typesense/configuration.py @@ -1,8 +1,18 @@ from .exceptions import ConfigError from .logger import logger - +from urllib.parse import urlparse class Node(object): + def __init__(self, url): + parsed = urlparse(url); + if not parsed.hostname: + raise ConfigError('Node URL does not contain the host name.') + if not parsed.port: + raise ConfigError('Node URL does not contain the port.') + if not parsed.scheme: + raise ConfigError('Node URL does not contain the protocol.') + 
self.__init__(parsed.hostname, parsed.port, parsed.path, parsed.scheme) + def __init__(self, host, port, path, protocol): self.host = host self.port = port @@ -21,19 +31,21 @@ def __init__(self, config_dict): Configuration.show_deprecation_warnings(config_dict) Configuration.validate_config_dict(config_dict) - node_dicts = config_dict.get('nodes', []) - self.nodes = [] - for node_dict in node_dicts: - self.nodes.append( - Node(node_dict['host'], node_dict['port'], node_dict.get('path', ''), node_dict['protocol']) - ) + for node_config in config_dict.get('nodes', []): + if isinstance(node_config, str): + node = Node(node_config) + else: + node = Node(node_config['host'], node_config['port'], node_config.get('path', ''), node_config['protocol']) + self.nodes.append(node) nearest_node = config_dict.get('nearest_node', None) - if nearest_node: - self.nearest_node = Node(nearest_node['host'], nearest_node['port'], nearest_node.get('path', ''), nearest_node['protocol']) - else: + if not nearest_node: self.nearest_node = None + else if isinstance(nearest_node, str): + self.nearest_node = Node(nearest_node) + else: + self.nearest_node = Node(nearest_node['host'], nearest_node['port'], nearest_node.get('path', ''), nearest_node['protocol']) self.api_key = config_dict.get('api_key', '') self.connection_timeout_seconds = config_dict.get('connection_timeout_seconds', 3.0) @@ -53,16 +65,18 @@ def validate_config_dict(config_dict): for node in nodes: if not Configuration.validate_node_fields(node): - raise ConfigError('`node` entry must be a dictionary with the following required keys: ' + raise ConfigError('`node` entry must be a URL string or a dictionary with the following required keys: ' 'host, port, protocol') nearest_node = config_dict.get('nearest_node', None) if nearest_node and not Configuration.validate_node_fields(nearest_node): - raise ConfigError('`nearest_node` entry must be a dictionary with the following required keys: ' + raise ConfigError('`nearest_node` entry 
must be a URL string or a dictionary with the following required keys: ' 'host, port, protocol') @staticmethod def validate_node_fields(node): + if isinstance(node, str): + return true expected_fields = {'host', 'port', 'protocol'} return expected_fields.issubset(node) @@ -76,4 +90,3 @@ def show_deprecation_warnings(config_dict): if config_dict.get('read_replica_nodes'): logger.warn('Deprecation warning: read_replica_nodes is now consolidated to nodes, starting with Typesense Server v0.12') - From 6993ec743cbf33bfd3f918601569f914c7f1463e Mon Sep 17 00:00:00 2001 From: Jason Bosco Date: Tue, 21 Nov 2023 12:31:23 -0600 Subject: [PATCH 016/288] Add support query params to collection delete --- typesense/collection.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/typesense/collection.py b/typesense/collection.py index 0852fe6..8c1bc99 100644 --- a/typesense/collection.py +++ b/typesense/collection.py @@ -21,5 +21,5 @@ def retrieve(self): def update(self, schema_change): return self.api_call.patch(self._endpoint_path(), schema_change) - def delete(self): - return self.api_call.delete(self._endpoint_path()) + def delete(self, params=None): + return self.api_call.delete(self._endpoint_path(), params) From ad561061028b1ec4e9fdcfa33f2cbe02e3c6475d Mon Sep 17 00:00:00 2001 From: Kishore Nallan Date: Wed, 22 Nov 2023 06:31:05 +0530 Subject: [PATCH 017/288] Bump version --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 8976a56..11ab233 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ setup( name='typesense', python_requires='>=3', - version='0.17.0', + version='0.18.0', packages=['examples', 'typesense'], install_requires=['requests'], url='https://typesense.org', From c164d1f1152b63142d271be1c3abd44eef15be14 Mon Sep 17 00:00:00 2001 From: Kishore Nallan Date: Wed, 29 Nov 2023 13:37:34 +0530 Subject: [PATCH 018/288] Bump: beta version to test build. 
--- pyproject.toml | 2 +- src/typesense/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 0d3eeb5..031da77 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ requires = ["setuptools"] build-backend = "setuptools.build_meta" [project] -name = "typesense-alt" +name = "typesense" description = "Python client for Typesense, an open source and typo tolerant search engine." authors = [ {"name" = "Typesense", "email" = "contact@typesense.org"}, diff --git a/src/typesense/__init__.py b/src/typesense/__init__.py index 14e64ad..441b4dc 100644 --- a/src/typesense/__init__.py +++ b/src/typesense/__init__.py @@ -1,4 +1,4 @@ from .client import Client # NOQA -__version__ = '0.17.1' +__version__ = '0.19.0.beta1' From e16dd8ebcda33d38b2fc75efa3b3d86a7a4abc4a Mon Sep 17 00:00:00 2001 From: Evgeny Liskovets Date: Thu, 14 Dec 2023 15:21:48 -0500 Subject: [PATCH 019/288] Allow to pass verify parameter to API client --- typesense/api_call.py | 11 ++++++----- typesense/configuration.py | 1 + 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/typesense/api_call.py b/typesense/api_call.py index 7d14766..6b880ce 100644 --- a/typesense/api_call.py +++ b/typesense/api_call.py @@ -145,25 +145,26 @@ def get(self, endpoint, params=None, as_json=True): params = params or {} return self.make_request(session.get, endpoint, as_json, params=params, - timeout=self.config.connection_timeout_seconds) + timeout=self.config.connection_timeout_seconds, verify=self.config.verify) def post(self, endpoint, body, params=None, as_json=True): params = params or {} ApiCall.normalize_params(params) return self.make_request(session.post, endpoint, as_json, params=params, data=body, - timeout=self.config.connection_timeout_seconds) + timeout=self.config.connection_timeout_seconds, verify=self.config.verify) def put(self, endpoint, body, params=None): return self.make_request(session.put, endpoint, True, params=params, 
data=body, - timeout=self.config.connection_timeout_seconds) + timeout=self.config.connection_timeout_seconds, verify=self.config.verify) def patch(self, endpoint, body, params=None): return self.make_request(session.patch, endpoint, True, params=params, data=body, - timeout=self.config.connection_timeout_seconds) + timeout=self.config.connection_timeout_seconds, verify=self.config.verify) def delete(self, endpoint, params=None): return self.make_request(session.delete, endpoint, True, - params=params, timeout=self.config.connection_timeout_seconds) + params=params, timeout=self.config.connection_timeout_seconds, + verify=self.config.verify) diff --git a/typesense/configuration.py b/typesense/configuration.py index c101a31..d72f5cd 100644 --- a/typesense/configuration.py +++ b/typesense/configuration.py @@ -52,6 +52,7 @@ def __init__(self, config_dict): self.num_retries = config_dict.get('num_retries', 3) self.retry_interval_seconds = config_dict.get('retry_interval_seconds', 1.0) self.healthcheck_interval_seconds = config_dict.get('healthcheck_interval_seconds', 60) + self.verify = config_dict.get("verify", True) @staticmethod def validate_config_dict(config_dict): From 69ae183c4d21de99c03fbca688dde06a19aa9df9 Mon Sep 17 00:00:00 2001 From: Evgeny Liskovets Date: Thu, 14 Dec 2023 19:13:56 -0500 Subject: [PATCH 020/288] Fix else if --- typesense/configuration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/typesense/configuration.py b/typesense/configuration.py index d72f5cd..a7c7250 100644 --- a/typesense/configuration.py +++ b/typesense/configuration.py @@ -42,7 +42,7 @@ def __init__(self, config_dict): nearest_node = config_dict.get('nearest_node', None) if not nearest_node: self.nearest_node = None - else if isinstance(nearest_node, str): + elif isinstance(nearest_node, str): self.nearest_node = Node(nearest_node) else: self.nearest_node = Node(nearest_node['host'], nearest_node['port'], nearest_node.get('path', ''), 
nearest_node['protocol']) From 133e18e5422e17a8a007f87bbb65a900e17bb922 Mon Sep 17 00:00:00 2001 From: Kishore Nallan Date: Wed, 3 Jan 2024 06:14:39 +0530 Subject: [PATCH 021/288] Version bump --- src/typesense/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/typesense/__init__.py b/src/typesense/__init__.py index 441b4dc..c1ce9e1 100644 --- a/src/typesense/__init__.py +++ b/src/typesense/__init__.py @@ -1,4 +1,4 @@ from .client import Client # NOQA -__version__ = '0.19.0.beta1' +__version__ = '0.19.0' From e9e06b9fe57fb12938bd7fcd26422f9612e0ba08 Mon Sep 17 00:00:00 2001 From: Kian-Meng Ang Date: Sat, 30 Mar 2024 11:02:36 +0800 Subject: [PATCH 022/288] Fix typo, empy -> empty Found via `codespell -H` --- examples/collection_operations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/collection_operations.py b/examples/collection_operations.py index 62e012e..c55d5f0 100644 --- a/examples/collection_operations.py +++ b/examples/collection_operations.py @@ -136,7 +136,7 @@ # Deleting documents matching a filter query print(client.collections['books'].documents.delete({'filter_by': 'ratings_count: 4780653'})) -# Try importing empy list +# Try importing empty list try: import_results = client.collections['books'].documents.import_([], {"action": "upsert"}) print(import_results) From 6d7c77f20eb21af4d5ecefdb3f4e0a1324937975 Mon Sep 17 00:00:00 2001 From: Sergei Sokolov Date: Thu, 25 Apr 2024 10:02:32 +0200 Subject: [PATCH 023/288] Add stopwords to the Client --- src/typesense/client.py | 2 ++ src/typesense/stopwords.py | 21 +++++++++++++++++++++ src/typesense/stopwords_set.py | 14 ++++++++++++++ 3 files changed, 37 insertions(+) create mode 100644 src/typesense/stopwords.py create mode 100644 src/typesense/stopwords_set.py diff --git a/src/typesense/client.py b/src/typesense/client.py index d56df0e..f12e02d 100644 --- a/src/typesense/client.py +++ b/src/typesense/client.py @@ -7,6 +7,7 @@ from .configuration 
import Configuration from .api_call import ApiCall from .analytics import Analytics +from .stopwords import Stopwords class Client(object): def __init__(self, config_dict): @@ -19,3 +20,4 @@ def __init__(self, config_dict): self.analytics = Analytics(self.api_call) self.operations = Operations(self.api_call) self.debug = Debug(self.api_call) + self.stopwords = Stopwords(self.api_call) diff --git a/src/typesense/stopwords.py b/src/typesense/stopwords.py new file mode 100644 index 0000000..d67208d --- /dev/null +++ b/src/typesense/stopwords.py @@ -0,0 +1,21 @@ +from .stopwords_set import StopwordsSet + + +class Stopwords(object): + RESOURCE_PATH = '/stopwords' + + def __init__(self, api_call): + self.api_call = api_call + self.stopwords_sets = {} + + def __getitem__(self, stopwords_set_id): + if stopwords_set_id not in self.stopwords_sets: + self.stopwords_sets[stopwords_set_id] = StopwordsSet(self.api_call, stopwords_set_id) + + return self.stopwords_sets.get(stopwords_set_id) + + def upsert(self, stopwords_set_id, stopwords_set): + return self.api_call.put('{}/{}'.format(Stopwords.RESOURCE_PATH, stopwords_set_id), stopwords_set) + + def retrieve(self): + return self.api_call.get('{0}'.format(Stopwords.RESOURCE_PATH)) diff --git a/src/typesense/stopwords_set.py b/src/typesense/stopwords_set.py new file mode 100644 index 0000000..18052d8 --- /dev/null +++ b/src/typesense/stopwords_set.py @@ -0,0 +1,14 @@ +class StopwordsSet(object): + def __init__(self, api_call, stopwords_set_id): + self.stopwords_set_id = stopwords_set_id + self.api_call = api_call + + def _endpoint_path(self): + from .stopwords import Stopwords + return u"{0}/{1}".format(Stopwords.RESOURCE_PATH, self.stopwords_set_id) + + def retrieve(self): + return self.api_call.get(self._endpoint_path()) + + def delete(self): + return self.api_call.delete(self._endpoint_path()) From e4459276abd9581be35c74610305703129364d36 Mon Sep 17 00:00:00 2001 From: Kishore Nallan Date: Fri, 26 Apr 2024 17:39:36 +0530 
Subject: [PATCH 024/288] Bump version --- src/typesense/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/typesense/__init__.py b/src/typesense/__init__.py index c1ce9e1..a81d8ec 100644 --- a/src/typesense/__init__.py +++ b/src/typesense/__init__.py @@ -1,4 +1,5 @@ from .client import Client # NOQA -__version__ = '0.19.0' +__version__ = '0.20.0' + From 82a1afd660b02636021fed37626fb93136384b15 Mon Sep 17 00:00:00 2001 From: Sergei Sokolov Date: Fri, 26 Apr 2024 14:14:30 +0200 Subject: [PATCH 025/288] Update compatibility table in readmie --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 298fa28..e13a171 100644 --- a/README.md +++ b/README.md @@ -20,6 +20,7 @@ See detailed [API documentation](https://typesense.org/api). | Typesense Server | typesense-python | |------------------|------------------| +| \>= v0.26.0 | \>= v0.20.0 | | \>= v0.25.0 | \>= v0.16.0 | | \>= v0.23.0 | \>= v0.14.0 | | \>= v0.21.0 | \>= v0.13.0 | From 2b306e36c4faacc094d795a23b49b00572737a74 Mon Sep 17 00:00:00 2001 From: Dima Boger Date: Fri, 3 May 2024 13:52:40 +0200 Subject: [PATCH 026/288] Add support for conversations/models APIs --- README.md | 2 +- src/typesense/__init__.py | 2 +- src/typesense/client.py | 2 ++ src/typesense/conversation_model.py | 17 +++++++++++++++++ src/typesense/conversations_models.py | 21 +++++++++++++++++++++ 5 files changed, 42 insertions(+), 2 deletions(-) create mode 100644 src/typesense/conversation_model.py create mode 100644 src/typesense/conversations_models.py diff --git a/README.md b/README.md index e13a171..bbe9f08 100644 --- a/README.md +++ b/README.md @@ -20,7 +20,7 @@ See detailed [API documentation](https://typesense.org/api). 
| Typesense Server | typesense-python | |------------------|------------------| -| \>= v0.26.0 | \>= v0.20.0 | +| \>= v26.0 | \>= v0.20.0 | | \>= v0.25.0 | \>= v0.16.0 | | \>= v0.23.0 | \>= v0.14.0 | | \>= v0.21.0 | \>= v0.13.0 | diff --git a/src/typesense/__init__.py b/src/typesense/__init__.py index a81d8ec..6e45291 100644 --- a/src/typesense/__init__.py +++ b/src/typesense/__init__.py @@ -1,5 +1,5 @@ from .client import Client # NOQA -__version__ = '0.20.0' +__version__ = '0.21.0' diff --git a/src/typesense/client.py b/src/typesense/client.py index f12e02d..d7d7a21 100644 --- a/src/typesense/client.py +++ b/src/typesense/client.py @@ -1,4 +1,5 @@ from .aliases import Aliases +from .conversations_models import ConversationsModels from .debug import Debug from .collections import Collections from .multi_search import MultiSearch @@ -21,3 +22,4 @@ def __init__(self, config_dict): self.operations = Operations(self.api_call) self.debug = Debug(self.api_call) self.stopwords = Stopwords(self.api_call) + self.conversations_models = ConversationsModels(self.api_call) diff --git a/src/typesense/conversation_model.py b/src/typesense/conversation_model.py new file mode 100644 index 0000000..da84aba --- /dev/null +++ b/src/typesense/conversation_model.py @@ -0,0 +1,17 @@ +class ConversationModel(object): + def __init__(self, api_call, model_id): + self.model_id = model_id + self.api_call = api_call + + def _endpoint_path(self): + from .conversations_models import ConversationsModels + return u"{0}/{1}".format(ConversationsModels.RESOURCE_PATH, self.model_id) + + def retrieve(self): + return self.api_call.get(self._endpoint_path()) + + def update(self, model): + return self.api_call.put(self._endpoint_path(), model) + + def delete(self): + return self.api_call.delete(self._endpoint_path()) diff --git a/src/typesense/conversations_models.py b/src/typesense/conversations_models.py new file mode 100644 index 0000000..58fd3a7 --- /dev/null +++ 
b/src/typesense/conversations_models.py @@ -0,0 +1,21 @@ +from .conversation_model import ConversationModel + + +class ConversationsModels(object): + RESOURCE_PATH = '/conversations/models' + + def __init__(self, api_call): + self.api_call = api_call + self.conversations_models = {} + + def __getitem__(self, model_id): + if model_id not in self.conversations_models: + self.conversations_models[model_id] = ConversationModel(self.api_call, model_id) + + return self.conversations_models.get(model_id) + + def create(self, model): + return self.api_call.post(ConversationsModels.RESOURCE_PATH, model) + + def retrieve(self): + return self.api_call.get(ConversationsModels.RESOURCE_PATH) From 9f89ac1fdcb7a5a32d7083d8733b148ce5be9b71 Mon Sep 17 00:00:00 2001 From: hjh Date: Thu, 25 Jul 2024 14:56:21 +0900 Subject: [PATCH 027/288] Fix: API client to normalize only booleans not integers --- src/typesense/api_call.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index 6b880ce..8b49e32 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -136,9 +136,9 @@ def set_node_healthcheck(self, node, is_healthy): @staticmethod def normalize_params(params): for key in params.keys(): - if params[key] == True: + if isinstance(params[key], bool) and params[key]: params[key] = 'true' - elif params[key] == False: + elif isinstance(params[key], bool) and not params[key]: params[key] = 'false' def get(self, endpoint, params=None, as_json=True): From e88f75650e03a1fe0e5af8cad121221a2f6f188d Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 26 Jul 2024 12:25:28 +0300 Subject: [PATCH 028/288] chore: add pytest and mypy --- pytest.ini | 2 ++ requirements.txt | 14 +++++++++++++- 2 files changed, 15 insertions(+), 1 deletion(-) create mode 100644 pytest.ini diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..fcccae1 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,2 @@ +[pytest] 
+pythonpath = src diff --git a/requirements.txt b/requirements.txt index 566083c..d79b503 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1,13 @@ -requests==2.22.0 +certifi==2024.7.4 +chardet==3.0.4 +charset-normalizer==3.3.2 +idna==2.8 +iniconfig==2.0.0 +mypy==1.11.0 +mypy-extensions==1.0.0 +packaging==24.1 +pluggy==1.5.0 +pytest==8.3.2 +requests==2.32.3 +typing_extensions==4.12.2 +urllib3==2.2.2 From 6ac5c3e8fbd4dffe24f9b0080335ca81176c715b Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 26 Jul 2024 12:49:19 +0300 Subject: [PATCH 029/288] fix: remove double defined constructor for node class - Remove double-defined constructor in Node class, leading to errors when initializing with a string - Add `from_url` class method for creating Node instances from URLs - Enhance type hints for Node constructor parameters - Update Configuration class to use new Node.from_url method --- src/typesense/configuration.py | 36 ++++++++++++++++++++++------------ 1 file changed, 23 insertions(+), 13 deletions(-) diff --git a/src/typesense/configuration.py b/src/typesense/configuration.py index 67485c2..30eb870 100644 --- a/src/typesense/configuration.py +++ b/src/typesense/configuration.py @@ -1,19 +1,17 @@ +from typing import Literal from .exceptions import ConfigError from .logger import logger from urllib.parse import urlparse -class Node(object): - def __init__(self, url): - parsed = urlparse(url); - if not parsed.hostname: - raise ConfigError('Node URL does not contain the host name.') - if not parsed.port: - raise ConfigError('Node URL does not contain the port.') - if not parsed.scheme: - raise ConfigError('Node URL does not contain the protocol.') - self.__init__(parsed.hostname, parsed.port, parsed.path, parsed.scheme) - def __init__(self, host, port, path, protocol): +class Node(object): + def __init__( + self, + host: str, + port: str | int, + path: str, + protocol: Literal['http', 'https'] | str, + ): self.host = host self.port = port self.path = 
path @@ -22,6 +20,18 @@ def __init__(self, host, port, path, protocol): # Used to skip bad hosts self.healthy = True + @classmethod + def from_url(cls, url: str) -> 'Node': + parsed = urlparse(url) + if not parsed.hostname: + raise ConfigError('Node URL does not contain the host name.') + if not parsed.port: + raise ConfigError('Node URL does not contain the port.') + if not parsed.scheme: + raise ConfigError('Node URL does not contain the protocol.') + + return cls(parsed.hostname, parsed.port, parsed.path, parsed.scheme) + def url(self): return '{0}://{1}:{2}{3}'.format(self.protocol, self.host, self.port, self.path) @@ -34,7 +44,7 @@ def __init__(self, config_dict): self.nodes = [] for node_config in config_dict.get('nodes', []): if isinstance(node_config, str): - node = Node(node_config) + node = Node.from_url(node_config) else: node = Node(node_config['host'], node_config['port'], node_config.get('path', ''), node_config['protocol']) self.nodes.append(node) @@ -43,7 +53,7 @@ def __init__(self, config_dict): if not nearest_node: self.nearest_node = None elif isinstance(nearest_node, str): - self.nearest_node = Node(nearest_node) + self.nearest_node = Node.from_url(nearest_node) else: self.nearest_node = Node(nearest_node['host'], nearest_node['port'], nearest_node.get('path', ''), nearest_node['protocol']) From f01084bfb9954389981242c2a51d029acc075a81 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 26 Jul 2024 12:50:39 +0300 Subject: [PATCH 030/288] test: add tests for node class initialization Implement comprehensive test suite for Node class functionality: - Test basic initialization and attribute setting - Validate URL parsing with Node.from_url method - Add error handling tests for invalid URL inputs - Ensure correct URL generation with url() method --- tests/__init__.py | 0 tests/node_test.py | 41 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 41 insertions(+) create mode 100644 tests/__init__.py create mode 100644 
tests/node_test.py diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/node_test.py b/tests/node_test.py new file mode 100644 index 0000000..1f98b37 --- /dev/null +++ b/tests/node_test.py @@ -0,0 +1,41 @@ +import pytest + +from src.typesense.configuration import Node +from src.typesense.exceptions import ConfigError + + +def test_node_initialization(): + node = Node(host="localhost", port=8108, path="/path", protocol="http") + assert node.host == "localhost" + assert node.port == 8108 + assert node.path == "/path" + assert node.protocol == "http" + assert node.healthy is True + + +def test_node_from_url(): + node = Node.from_url("http://localhost:8108/path") + assert node.host == "localhost" + assert node.port == 8108 + assert node.path == "/path" + assert node.protocol == "http" + + +def test_node_from_url_missing_hostname(): + with pytest.raises(ConfigError, match="Node URL does not contain the host name."): + Node.from_url("http://:8108/path") + + +def test_node_from_url_missing_port(): + with pytest.raises(ConfigError, match="Node URL does not contain the port."): + Node.from_url("http://localhost:/path") + + +def test_node_from_url_missing_scheme(): + with pytest.raises(ConfigError, match="Node URL does not contain the protocol."): + Node.from_url("//localhost:8108/path") + + +def test_node_url(): + node = Node(host="localhost", port=8108, path="/path", protocol="http") + assert node.url() == "http://localhost:8108/path" From 50f43994c6e73321fd2cedb8b2323df003b9752d Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 26 Jul 2024 17:08:18 +0300 Subject: [PATCH 031/288] feat(configuration): add type hinting for configuration class - Add TypedDict classes for node and config dictionaries. 
- Improve typehints throughout the Configuration and Node classes. - Include detailed docstrings for new TypedDict classes to clarify expected attributes. --- src/typesense/configuration.py | 76 ++++++++++++++++++++++++++++++---- 1 file changed, 69 insertions(+), 7 deletions(-) diff --git a/src/typesense/configuration.py b/src/typesense/configuration.py index 30eb870..73c43ba 100644 --- a/src/typesense/configuration.py +++ b/src/typesense/configuration.py @@ -1,9 +1,71 @@ -from typing import Literal +from __future__ import annotations + +from typing import Literal, NotRequired, TypedDict, Union from .exceptions import ConfigError from .logger import logger from urllib.parse import urlparse +class NodeConfigDict(TypedDict): + """ + A dictionary that represents the configuration for a node in the Typesense cluster. + + Attributes: + host (str): The host name of the node. + port (int): The port number of the node. + path (str, optional): The path of the node. + protocol (Literal['http', 'https'] | str): The protocol of the node. + """ + + host: str + port: int + path: NotRequired[str] + protocol: Literal['http', 'https'] | str + + +class ConfigDict(TypedDict): + """ + A dictionary that represents the configuration for the Typesense client. + + Attributes: + nodes (list[Union[str, NodeConfigDict]]): A list of dictionaries or URLs that + represent the nodes in the cluster. + + nearest_node (Union[str, NodeConfigDict]): A dictionary or URL + that represents the nearest node to the client. + + api_key (str): The API key to use for authentication. + + num_retries (int): The number of retries to attempt before failing. + + interval_seconds (int): The interval in seconds between retries. + + healthcheck_interval_seconds (int): The interval in seconds between + health checks. + + verify (bool): Whether to verify the SSL certificate. + + timeout_seconds (int, deprecated): The connection timeout in seconds. 
+ + master_node (Union[str, NodeConfigDict], deprecated): A dictionary or + URL that represents the master node. + + read_replica_nodes (list[Union[str, NodeConfigDict]], deprecated): A list of + dictionaries or URLs that represent the read replica nodes. + """ + + nodes: list[Union[str, NodeConfigDict]] + nearest_node: NotRequired[Union[str, NodeConfigDict]] + api_key: str + num_retries: NotRequired[int] + interval_seconds: NotRequired[int] + healthcheck_interval_seconds: NotRequired[int] + verify: NotRequired[bool] + timeout_seconds: NotRequired[int] # deprecated + master_node: NotRequired[Union[str, NodeConfigDict]] # deprecated + read_replica_nodes: NotRequired[list[Union[str, NodeConfigDict]]] # deprecated + + class Node(object): def __init__( self, @@ -11,7 +73,7 @@ def __init__( port: str | int, path: str, protocol: Literal['http', 'https'] | str, - ): + ) -> None: self.host = host self.port = port self.path = path @@ -32,16 +94,16 @@ def from_url(cls, url: str) -> 'Node': return cls(parsed.hostname, parsed.port, parsed.path, parsed.scheme) - def url(self): + def url(self) -> str: return '{0}://{1}:{2}{3}'.format(self.protocol, self.host, self.port, self.path) class Configuration(object): - def __init__(self, config_dict): + def __init__(self, config_dict: ConfigDict) -> None: Configuration.show_deprecation_warnings(config_dict) Configuration.validate_config_dict(config_dict) - self.nodes = [] + self.nodes: list[Node] = [] for node_config in config_dict.get('nodes', []): if isinstance(node_config, str): node = Node.from_url(node_config) @@ -65,7 +127,7 @@ def __init__(self, config_dict): self.verify = config_dict.get("verify", True) @staticmethod - def validate_config_dict(config_dict): + def validate_config_dict(config_dict: ConfigDict) -> None: nodes = config_dict.get('nodes', None) if not nodes: raise ConfigError('`nodes` is not defined.') @@ -92,7 +154,7 @@ def validate_node_fields(node): return expected_fields.issubset(node) @staticmethod - def 
show_deprecation_warnings(config_dict): + def show_deprecation_warnings(config_dict: ConfigDict) -> None: if config_dict.get('timeout_seconds'): logger.warn('Deprecation warning: timeout_seconds is now renamed to connection_timeout_seconds') From 92a084aa95aef4582b57ea6c766c4f74fe522e06 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 26 Jul 2024 17:09:07 +0300 Subject: [PATCH 032/288] style: add return type for node tests --- tests/node_test.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/node_test.py b/tests/node_test.py index 1f98b37..9e0ac33 100644 --- a/tests/node_test.py +++ b/tests/node_test.py @@ -4,7 +4,7 @@ from src.typesense.exceptions import ConfigError -def test_node_initialization(): +def test_node_initialization() -> None: node = Node(host="localhost", port=8108, path="/path", protocol="http") assert node.host == "localhost" assert node.port == 8108 @@ -13,7 +13,7 @@ def test_node_initialization(): assert node.healthy is True -def test_node_from_url(): +def test_node_from_url() -> None: node = Node.from_url("http://localhost:8108/path") assert node.host == "localhost" assert node.port == 8108 @@ -21,21 +21,21 @@ def test_node_from_url(): assert node.protocol == "http" -def test_node_from_url_missing_hostname(): +def test_node_from_url_missing_hostname() -> None: with pytest.raises(ConfigError, match="Node URL does not contain the host name."): Node.from_url("http://:8108/path") -def test_node_from_url_missing_port(): +def test_node_from_url_missing_port() -> None: with pytest.raises(ConfigError, match="Node URL does not contain the port."): Node.from_url("http://localhost:/path") -def test_node_from_url_missing_scheme(): +def test_node_from_url_missing_scheme() -> None: with pytest.raises(ConfigError, match="Node URL does not contain the protocol."): 
Node.from_url("//localhost:8108/path") -def test_node_url(): +def test_node_url() -> None: node = Node(host="localhost", port=8108, path="/path", protocol="http") assert node.url() == "http://localhost:8108/path" From 76f92222196a7fdc01714c9edffe0be6208dbf22 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 26 Jul 2024 17:10:26 +0300 Subject: [PATCH 033/288] tests(configuration): add tests for configuration class Implement unit tests for Configuration class methods: - Test node field validation for URLs and dictionaries - Verify deprecation warnings for outdated config fields - Ensure proper handling of valid and invalid node configurations --- tests/configuration_test.py | 83 +++++++++++++++++++++++++++++++++++++ 1 file changed, 83 insertions(+) create mode 100644 tests/configuration_test.py diff --git a/tests/configuration_test.py b/tests/configuration_test.py new file mode 100644 index 0000000..7818726 --- /dev/null +++ b/tests/configuration_test.py @@ -0,0 +1,83 @@ +import pytest + +from src.typesense.configuration import ConfigDict, Configuration + + +def test_validate_node_fields_with_url() -> None: + """Test validate_node_fields with a URL string.""" + assert Configuration.validate_node_fields("http://localhost:8108/path") + + +def test_validate_node_fields_with_valid_dict() -> None: + """Test validate_node_fields with a valid dictionary.""" + assert Configuration.validate_node_fields( + {"host": "localhost", "port": 8108, "protocol": "http"} + ) + + +def test_validate_node_fields_with_invalid_dict() -> None: + """Test validate_node_fields with an invalid dictionary.""" + assert not Configuration.validate_node_fields( + { # type: ignore[arg-type] + "host": "localhost", + "port": 8108, + } + ) + + +def test_deprecation_warning_timeout_seconds(caplog: pytest.LogCaptureFixture) -> None: + """ + Test that a deprecation 
warning is issued for the 'timeout_seconds' field. + """ + config_dict: ConfigDict = { + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + "nearest_node": "http://localhost:8108", + "api_key": "xyz", + "timeout_seconds": 10, + } + Configuration.show_deprecation_warnings(config_dict) + assert ( + ' '.join( + [ + "Deprecation warning: timeout_seconds is now renamed", + "to connection_timeout_seconds", + ] + ) + in caplog.text + ) + + +def test_deprecation_warning_master_node(caplog: pytest.LogCaptureFixture) -> None: + """ + Test that a deprecation warning is issued for the 'master_node' field. + """ + config_dict: ConfigDict = { + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + "nearest_node": "http://localhost:8108", + "api_key": "xyz", + "master_node": "http://localhost:8108", + } + Configuration.show_deprecation_warnings(config_dict) + assert ( + "Deprecation warning: master_node is now consolidated to nodes" in caplog.text + ) + + +@pytest.mark.filterwarnings("ignore:Deprecation warning") +def test_deprecation_warning_read_replica_nodes( + caplog: pytest.LogCaptureFixture, +) -> None: + """ + Test that a deprecation warning is issued for the 'read_replica_nodes' field. 
+ """ + config_dict: ConfigDict = { + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + "nearest_node": "http://localhost:8108", + "api_key": "xyz", + "read_replica_nodes": ["http://localhost:8109"], + } + Configuration.show_deprecation_warnings(config_dict) + assert ( + "Deprecation warning: read_replica_nodes is now consolidated to nodes" + in caplog.text + ) From 5ab087122a7e4791471194d26aeacf027040d78e Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 26 Jul 2024 17:11:35 +0300 Subject: [PATCH 034/288] feat(exceptions): add type hints to exception args Add specific type annotations for __init__ method parameters in TypesenseClientError class. This enhances type checking and improves code clarity for error handling. --- src/typesense/exceptions.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/typesense/exceptions.py b/src/typesense/exceptions.py index 26f7cfd..de5085d 100644 --- a/src/typesense/exceptions.py +++ b/src/typesense/exceptions.py @@ -1,5 +1,10 @@ +from __future__ import annotations + +from typing import Any + + class TypesenseClientError(IOError): - def __init__(self, *args, **kwargs): + def __init__(self, *args: object, **kwargs: dict[Any, Any]) -> None: super(TypesenseClientError, self).__init__(*args, **kwargs) From 31d5820afa1e5d15ce004eb00324e78bd34fbd8e Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 26 Jul 2024 17:13:28 +0300 Subject: [PATCH 035/288] chore: add mypy config Introduce strict Mypy settings to enhance type checking. Enable additional error codes, set explicit package bases, and ignore missing imports. This improves code quality and catches potential issues early. 
--- setup.cfg | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 41a61e6..859a8b7 100644 --- a/setup.cfg +++ b/setup.cfg @@ -4,4 +4,21 @@ max-line-length = 160 [metadata] license_file = LICENSE -[isort] \ No newline at end of file +[isort][isort] + +[mypy] +# Mypy configuration: +# https://mypy.readthedocs.io/en/latest/config_file.html +enable_error_code = + truthy-bool, + truthy-iterable, + redundant-expr, + unused-awaitable, + ignore-without-code, + possibly-undefined, + redundant-self, + +explicit_package_bases = true +ignore_missing_imports = true +strict = true +warn_unreachable = true From 83999192dcd2e5b33a8a3fd5ff135b4c2eca4053 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 26 Jul 2024 17:16:16 +0300 Subject: [PATCH 036/288] chore: add isort config Add isort configuration to setup.cfg for consistent import sorting. --- requirements.txt | 1 + setup.cfg | 10 +++++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index d79b503..8595ce9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,6 +3,7 @@ chardet==3.0.4 charset-normalizer==3.3.2 idna==2.8 iniconfig==2.0.0 +isort==5.13.2 mypy==1.11.0 mypy-extensions==1.0.0 packaging==24.1 diff --git a/setup.cfg b/setup.cfg index 859a8b7..6ba6a5a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -4,7 +4,15 @@ max-line-length = 160 [metadata] license_file = LICENSE -[isort][isort] +[isort] +# isort configuration: +# https://github.com/PyCQA/isort/wiki/isort-Settings +multi_line_output = 3 +include_trailing_comma = True +force_grid_wrap = 0 +use_parentheses = True +ensure_newline_before_comments = True +line_length = 88 [mypy] # Mypy configuration: From 5d6c915b95570a94e74228a99ca262fb802ded89 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 29 Jul 2024 14:33:13 +0300 Subject: [PATCH 037/288] 
style(tests): node tests styling updates - Update import statements for Node and ConfigError - Implement more specific error messages for URL parsing failures - Ensure consistent docstring formatting across test functions --- tests/node_test.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/tests/node_test.py b/tests/node_test.py index 9e0ac33..b186833 100644 --- a/tests/node_test.py +++ b/tests/node_test.py @@ -1,10 +1,13 @@ +"""Tests for the Node class.""" + import pytest -from src.typesense.configuration import Node -from src.typesense.exceptions import ConfigError +from typesense.configuration import Node +from typesense.exceptions import ConfigError def test_node_initialization() -> None: + """Test the initialization of the Node class using an object.""" node = Node(host="localhost", port=8108, path="/path", protocol="http") assert node.host == "localhost" assert node.port == 8108 @@ -14,6 +17,7 @@ def test_node_initialization() -> None: def test_node_from_url() -> None: + """Test the initialization of the Node class using a URL.""" node = Node.from_url("http://localhost:8108/path") assert node.host == "localhost" assert node.port == 8108 @@ -22,20 +26,24 @@ def test_node_from_url() -> None: def test_node_from_url_missing_hostname() -> None: + """Test the initialization of the Node class using a URL without a host name.""" with pytest.raises(ConfigError, match="Node URL does not contain the host name."): Node.from_url("http://:8108/path") def test_node_from_url_missing_port() -> None: + """Test the initialization of the Node class using a URL without a port.""" with pytest.raises(ConfigError, match="Node URL does not contain the port."): Node.from_url("http://localhost:/path") def test_node_from_url_missing_scheme() -> None: + """Test the initialization of the Node class using a URL without a scheme.""" with pytest.raises(ConfigError, 
match="Node URL does not contain the protocol."): Node.from_url("//localhost:8108/path") def test_node_url() -> None: + """Test the URL method of the Node class.""" node = Node(host="localhost", port=8108, path="/path", protocol="http") assert node.url() == "http://localhost:8108/path" From 1ef6ac7ca3dab31d8b5faaa114061f7931db769a Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 29 Jul 2024 14:39:21 +0300 Subject: [PATCH 038/288] docs(configuration): add docstrings for configuration methods - Add comprehensive module-level docstring - Expand Node class with detailed method docstrings - Expand Configuration class with detailed method docstrings --- src/typesense/configuration.py | 93 +++++++++++++++++++++++++++++++++- 1 file changed, 91 insertions(+), 2 deletions(-) diff --git a/src/typesense/configuration.py b/src/typesense/configuration.py index 73c43ba..112ebd7 100644 --- a/src/typesense/configuration.py +++ b/src/typesense/configuration.py @@ -1,10 +1,27 @@ +""" +This module provides configuration management for the Typesense Instance. + +Classes: + - Config: Handles loading and accessing configuration settings. + - Node: Represents a node in the Typesense cluster. + +Functions: + - load_config: Loads configuration from a file. + - get_setting: Retrieves a specific setting from the configuration. + - set_setting: Updates a specific setting in the configuration. + +Exceptions: + - ConfigError: Custom exception for configuration-related errors. 
+""" + from __future__ import annotations from typing import Literal, NotRequired, TypedDict, Union -from .exceptions import ConfigError -from .logger import logger from urllib.parse import urlparse +from typesense.exceptions import ConfigError +from typesense.logger import logger + class NodeConfigDict(TypedDict): """ @@ -67,6 +84,17 @@ class ConfigDict(TypedDict): class Node(object): + """ + Class for representing a node in the Typesense cluster. + + Attributes: + host (str): The host name of the node. + port (str | int): The port number of the node. + path (str): The path of the node. + protocol (Literal['http', 'https'] | str): The protocol of the node. + healthy (bool): Whether the node is healthy or not. + """ + def __init__( self, host: str, @@ -74,6 +102,15 @@ def __init__( path: str, protocol: Literal['http', 'https'] | str, ) -> None: + """ + Initialize a Node object with the specified host, port, path, and protocol. + + Args: + host (str): The host name of the node. + port (str | int): The port number of the node. + path (str): The path of the node. + protocol (Literal['http', 'https'] | str): The protocol of the node. + """ self.host = host self.port = port self.path = path @@ -84,6 +121,18 @@ def __init__( @classmethod def from_url(cls, url: str) -> 'Node': + """ + Initialize a Node object from a URL string. + + Args: + url (str): The URL string to parse. + + Returns: + Node: The Node object created from the URL string. + + Raises: + ConfigError: If the URL does not contain the host name, port number, or protocol. + """ parsed = urlparse(url) if not parsed.hostname: raise ConfigError('Node URL does not contain the host name.') @@ -96,12 +145,36 @@ def from_url(cls, url: str) -> 'Node': def url(self) -> str: return '{0}://{1}:{2}{3}'.format(self.protocol, self.host, self.port, self.path) + """ + Generate the URL of the node. + Returns: + str: The URL of the node + """ + """ + Class for managing the configuration settings for the Typesense client. 
+ + Attributes: + nodes (list[Node]): A list of Node objects representing the nodes in the cluster. + nearest_node (Node | None): The nearest node to the client. + api_key (str): The API key to use for authentication. + connection_timeout_seconds (float): The connection timeout in seconds. + num_retries (int): The number of retries to attempt before failing. + retry_interval_seconds (float): The interval in seconds between retries. + healthcheck_interval_seconds (int): The interval in seconds between health checks. + verify (bool): Whether to verify the SSL certificate. + """ class Configuration(object): def __init__(self, config_dict: ConfigDict) -> None: Configuration.show_deprecation_warnings(config_dict) Configuration.validate_config_dict(config_dict) + """ + Initialize a Configuration object with the specified configuration settings. + + Args: + config_dict (ConfigDict): A dictionary containing the configuration settings. + """ self.nodes: list[Node] = [] for node_config in config_dict.get('nodes', []): @@ -148,6 +221,15 @@ def validate_config_dict(config_dict: ConfigDict) -> None: @staticmethod def validate_node_fields(node): + """ + Validate the fields of a node in the configuration dictionary. + + Args: + node (str | NodeConfigDict): The node to validate. + + Returns: + bool: True if the node is valid, False otherwise. + """ if isinstance(node, str): return True expected_fields = {'host', 'port', 'protocol'} @@ -155,6 +237,13 @@ def validate_node_fields(node): @staticmethod def show_deprecation_warnings(config_dict: ConfigDict) -> None: + """ + Show deprecation warnings for deprecated configuration fields. + + Args: + config_dict (ConfigDict): The configuration dictionary + to check for deprecated fields. 
+ """ if config_dict.get('timeout_seconds'): logger.warn('Deprecation warning: timeout_seconds is now renamed to connection_timeout_seconds') From d2cbb1ce68901aa60ad66d7b20eb09a3a1502833 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 29 Jul 2024 14:40:13 +0300 Subject: [PATCH 039/288] fix(configuration): add type hinting for configuration validation method --- src/typesense/configuration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/typesense/configuration.py b/src/typesense/configuration.py index 112ebd7..4a2e95a 100644 --- a/src/typesense/configuration.py +++ b/src/typesense/configuration.py @@ -220,7 +220,7 @@ def validate_config_dict(config_dict: ConfigDict) -> None: 'host, port, protocol') @staticmethod - def validate_node_fields(node): + def validate_node_fields(node: str | NodeConfigDict) -> bool: """ Validate the fields of a node in the configuration dictionary. From 9cfd4cd594f544d533f93ba095d87e348831a1c6 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 29 Jul 2024 14:46:09 +0300 Subject: [PATCH 040/288] refactor(configuration): remove redudant `object` inheritance from classes --- src/typesense/configuration.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/typesense/configuration.py b/src/typesense/configuration.py index 4a2e95a..5b0a5f8 100644 --- a/src/typesense/configuration.py +++ b/src/typesense/configuration.py @@ -83,7 +83,7 @@ class ConfigDict(TypedDict): read_replica_nodes: NotRequired[list[Union[str, NodeConfigDict]]] # deprecated -class Node(object): +class Node: """ Class for representing a node in the Typesense cluster. @@ -151,6 +151,8 @@ def url(self) -> str: Returns: str: The URL of the node """ + +class Configuration: """ Class for managing the configuration settings for the Typesense client. @@ -165,7 +167,6 @@ def url(self) -> str: verify (bool): Whether to verify the SSL certificate. 
""" -class Configuration(object): def __init__(self, config_dict: ConfigDict) -> None: Configuration.show_deprecation_warnings(config_dict) Configuration.validate_config_dict(config_dict) From f48b53ca748fbfa9aebc1faaa9ba1775827109e0 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 29 Jul 2024 14:55:05 +0300 Subject: [PATCH 041/288] refactor(configuration): add node initialization method for configuration - Extract node initialization logic into separate methods - Improve handling of nearest node configuration - Reduce code duplication in node and nearest node processing --- src/typesense/configuration.py | 57 ++++++++++++++++++++++++++-------- 1 file changed, 44 insertions(+), 13 deletions(-) diff --git a/src/typesense/configuration.py b/src/typesense/configuration.py index 5b0a5f8..f62812a 100644 --- a/src/typesense/configuration.py +++ b/src/typesense/configuration.py @@ -177,29 +177,60 @@ def __init__(self, config_dict: ConfigDict) -> None: config_dict (ConfigDict): A dictionary containing the configuration settings. 
""" - self.nodes: list[Node] = [] - for node_config in config_dict.get('nodes', []): - if isinstance(node_config, str): - node = Node.from_url(node_config) - else: - node = Node(node_config['host'], node_config['port'], node_config.get('path', ''), node_config['protocol']) - self.nodes.append(node) + self.nodes: list[Node] = [ + self._initialize_nodes(node) for node in config_dict['nodes'] + ] nearest_node = config_dict.get('nearest_node', None) - if not nearest_node: - self.nearest_node = None - elif isinstance(nearest_node, str): - self.nearest_node = Node.from_url(nearest_node) - else: - self.nearest_node = Node(nearest_node['host'], nearest_node['port'], nearest_node.get('path', ''), nearest_node['protocol']) self.api_key = config_dict.get('api_key', '') self.connection_timeout_seconds = config_dict.get('connection_timeout_seconds', 3.0) + self.nearest_node = self._handle_nearest_node(nearest_node) self.num_retries = config_dict.get('num_retries', 3) self.retry_interval_seconds = config_dict.get('retry_interval_seconds', 1.0) self.healthcheck_interval_seconds = config_dict.get('healthcheck_interval_seconds', 60) self.verify = config_dict.get("verify", True) + def _handle_nearest_node( + self, + nearest_node: Union[str, NodeConfigDict, None], + ) -> Union[Node, None]: + """ + Handle the nearest node configuration. + + Args: + nearest_node (str | NodeConfigDict): The nearest node configuration. + + Returns: + Node | None: The nearest node object if it exists, None otherwise. + """ + if nearest_node is None: + return None + return self._initialize_nodes(nearest_node) + + def _initialize_nodes( + self, + node: Union[str, NodeConfigDict], + ) -> Node: + """ + Handle the initialization of a node. + + Args: + node (Node): The node to initialize. + + Returns: + Node: The initialized node. 
+ """ + if isinstance(node, str): + return Node.from_url(node) + + return Node( + node['host'], + node['port'], + node.get('path', ' '), + node['protocol'], + ) + @staticmethod def validate_config_dict(config_dict: ConfigDict) -> None: nodes = config_dict.get('nodes', None) From 5a2eadf5b5b9ec901c0f0ba4a078390edf814818 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 29 Jul 2024 14:56:38 +0300 Subject: [PATCH 042/288] style(configuration): use f string instead of format - Improve the `url` method in the `Node` class to use an f-string for better readability and consistency with modern Python practices. --- src/typesense/configuration.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/typesense/configuration.py b/src/typesense/configuration.py index f62812a..3bed439 100644 --- a/src/typesense/configuration.py +++ b/src/typesense/configuration.py @@ -144,13 +144,14 @@ def from_url(cls, url: str) -> 'Node': return cls(parsed.hostname, parsed.port, parsed.path, parsed.scheme) def url(self) -> str: - return '{0}://{1}:{2}{3}'.format(self.protocol, self.host, self.port, self.path) """ Generate the URL of the node. Returns: str: The URL of the node """ + return f'{self.protocol}://{self.host}:{self.port}{self.path}' + class Configuration: """ From df5e639bdc8657f2c041c1f3088bcb3ef931e911 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 29 Jul 2024 15:01:19 +0300 Subject: [PATCH 043/288] style(configuration): format to adhere to black's default formatting Black formatter's default line width rule is '+10%' (88 columns). 
https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html#line-length - Format the file accordingly to Black's recommended line width --- src/typesense/configuration.py | 68 ++++++++++++++++++++++++++++------ 1 file changed, 56 insertions(+), 12 deletions(-) diff --git a/src/typesense/configuration.py b/src/typesense/configuration.py index 3bed439..659246b 100644 --- a/src/typesense/configuration.py +++ b/src/typesense/configuration.py @@ -168,9 +168,12 @@ class Configuration: verify (bool): Whether to verify the SSL certificate. """ - def __init__(self, config_dict: ConfigDict) -> None: Configuration.show_deprecation_warnings(config_dict) Configuration.validate_config_dict(config_dict) + def __init__( + self, + config_dict: ConfigDict, + ) -> None: """ Initialize a Configuration object with the specified configuration settings. @@ -184,12 +187,18 @@ def __init__(self, config_dict: ConfigDict) -> None: nearest_node = config_dict.get('nearest_node', None) - self.api_key = config_dict.get('api_key', '') - self.connection_timeout_seconds = config_dict.get('connection_timeout_seconds', 3.0) self.nearest_node = self._handle_nearest_node(nearest_node) + self.api_key = config_dict.get('api_key', ' ') + self.connection_timeout_seconds = config_dict.get( + 'connection_timeout_seconds', + 3.0, + ) self.num_retries = config_dict.get('num_retries', 3) self.retry_interval_seconds = config_dict.get('retry_interval_seconds', 1.0) - self.healthcheck_interval_seconds = config_dict.get('healthcheck_interval_seconds', 60) + self.healthcheck_interval_seconds = config_dict.get( + 'healthcheck_interval_seconds', + 60, + ) self.verify = config_dict.get("verify", True) def _handle_nearest_node( @@ -243,14 +252,28 @@ def validate_config_dict(config_dict: ConfigDict) -> None: raise ConfigError('`api_key` is not defined.') for node in nodes: - if not Configuration.validate_node_fields(node): - raise ConfigError('`node` entry 
must be a URL string or a dictionary with the following required keys: ' - 'host, port, protocol') + if not ConfigurationValidations.validate_node_fields(node): + raise ConfigError( + ' '.join( + [ + '`node` entry must be a URL string or a', + 'dictionary with the following required keys:', + 'host, port, protocol', + ], + ), + ) nearest_node = config_dict.get('nearest_node', None) if nearest_node and not Configuration.validate_node_fields(nearest_node): - raise ConfigError('`nearest_node` entry must be a URL string or a dictionary with the following required keys: ' - 'host, port, protocol') + raise ConfigError( + ' '.join( + [ + '`nearest_node` entry must be a URL string or a dictionary', + 'with the following required keys:', + 'host, port, protocol', + ], + ), + ) @staticmethod def validate_node_fields(node: str | NodeConfigDict) -> bool: @@ -278,10 +301,31 @@ def show_deprecation_warnings(config_dict: ConfigDict) -> None: to check for deprecated fields. """ if config_dict.get('timeout_seconds'): - logger.warn('Deprecation warning: timeout_seconds is now renamed to connection_timeout_seconds') + logger.warn( + ' '.join( + [ + 'Deprecation warning: timeout_seconds is now renamed', + 'to connection_timeout_seconds', + ], + ), + ) if config_dict.get('master_node'): - logger.warn('Deprecation warning: master_node is now consolidated to nodes, starting with Typesense Server v0.12') + logger.warn( + ' '.join( + [ + 'Deprecation warning: master_node is now consolidated', + 'to nodes,starting with Typesense Server v0.12', + ], + ), + ) if config_dict.get('read_replica_nodes'): - logger.warn('Deprecation warning: read_replica_nodes is now consolidated to nodes, starting with Typesense Server v0.12') + logger.warn( + ' '.join( + [ + 'Deprecation warning: read_replica_nodes is now', + 'consolidated to nodes, starting with Typesense Server v0.12', + ], + ), + ) From 8a51d818707e8715d6250c6184b9a21f61d25a44 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 29 Jul 
2024 15:03:06 +0300 Subject: [PATCH 044/288] refactor(configuration): add validations class to break up the class' members - Delegate validation to a new `ConfigurationValidations` class - Improve readability by moving validation methods --- src/typesense/configuration.py | 67 ++++++++++++++++++++++++++++++---- 1 file changed, 59 insertions(+), 8 deletions(-) diff --git a/src/typesense/configuration.py b/src/typesense/configuration.py index 659246b..bd5d970 100644 --- a/src/typesense/configuration.py +++ b/src/typesense/configuration.py @@ -168,8 +168,6 @@ class Configuration: verify (bool): Whether to verify the SSL certificate. """ - Configuration.show_deprecation_warnings(config_dict) - Configuration.validate_config_dict(config_dict) def __init__( self, config_dict: ConfigDict, @@ -180,6 +178,9 @@ def __init__( Args: config_dict (ConfigDict): A dictionary containing the configuration settings. """ + self.validations = ConfigurationValidations + self.validations.show_deprecation_warnings(config_dict) + self.validations.validate_config_dict(config_dict) self.nodes: list[Node] = [ self._initialize_nodes(node) for node in config_dict['nodes'] @@ -241,16 +242,56 @@ def _initialize_nodes( node['protocol'], ) + +class ConfigurationValidations: + """Class for validating the configuration dictionary.""" + @staticmethod def validate_config_dict(config_dict: ConfigDict) -> None: - nodes = config_dict.get('nodes', None) - if not nodes: + """ + Validate the configuration dictionary to ensure it contains the required fields. + + Args: + config_dict (ConfigDict): The configuration dictionary to validate. + + Raises: + ConfigError: If the configuration dictionary is missing required fields. 
+ """ + ConfigurationValidations.validate_required_config_fields(config_dict) + ConfigurationValidations.validate_nodes(config_dict['nodes']) + + nearest_node = config_dict.get('nearest_node', None) + if nearest_node: + ConfigurationValidations.validate_nearest_node(nearest_node) + + @staticmethod + def validate_required_config_fields(config_dict: ConfigDict) -> None: + """ + Validate the presence of required fields in the configuration dictionary. + + Args: + config_dict (ConfigDict): The configuration dictionary to validate. + + Raises: + ConfigError: If the configuration dictionary is missing required fields. + """ + if not config_dict.get('nodes'): raise ConfigError('`nodes` is not defined.') - api_key = config_dict.get('api_key', None) - if not api_key: + if not config_dict.get('api_key'): raise ConfigError('`api_key` is not defined.') + @staticmethod + def validate_nodes(nodes: list[Union[str, NodeConfigDict]]) -> None: + """ + Validate the nodes in the configuration dictionary. + + Args: + nodes (list): The list of nodes to validate. + + Raises: + ConfigError: If any node is invalid. + """ for node in nodes: if not ConfigurationValidations.validate_node_fields(node): raise ConfigError( @@ -263,8 +304,18 @@ def validate_config_dict(config_dict: ConfigDict) -> None: ), ) - nearest_node = config_dict.get('nearest_node', None) - if nearest_node and not Configuration.validate_node_fields(nearest_node): + @staticmethod + def validate_nearest_node(nearest_node: Union[str, NodeConfigDict]) -> None: + """ + Validate the nearest node in the configuration dictionary. + + Args: + nearest_node (dict): The nearest node to validate. + + Raises: + ConfigError: If the nearest node is invalid. 
+ """ + if not ConfigurationValidations.validate_node_fields(nearest_node): raise ConfigError( ' '.join( [ From 2ff847a6568377b4ba043dd35e04851a4cb45845 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 29 Jul 2024 15:04:53 +0300 Subject: [PATCH 045/288] feat(tests): add object assertion utility functions Add utility functions for object attribute assertions - Introduce `obj_to_dict` for converting objects to dictionaries - Add `assert_match_object` to check if objects have the same attributes - Implement `assert_to_contain_object` to verify partial attribute matches - Create `assert_object_lists_match` for comparing lists of objects - Include `raise_with_diff` to provide detailed diff in assertion errors chore(tests): register object assertion utils to pytest config --- tests/conftest.py | 5 ++ tests/utils/object_assertions.py | 106 +++++++++++++++++++++++++++++++ 2 files changed, 111 insertions(+) create mode 100644 tests/conftest.py create mode 100644 tests/utils/object_assertions.py diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..bceef17 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,5 @@ +"""Pytest configuration file.""" + +import pytest + +pytest.register_assert_rewrite("utils.object_assertions") diff --git a/tests/utils/object_assertions.py b/tests/utils/object_assertions.py new file mode 100644 index 0000000..e8195a2 --- /dev/null +++ b/tests/utils/object_assertions.py @@ -0,0 +1,106 @@ +"""Utility functions for asserting that objects have the same attribute values.""" + +from __future__ import annotations + +import difflib +from typing import Any, Counter, Iterable, TypeVar + +TObj = TypeVar("TObj", bound=object) + + +def obj_to_dict(input_obj: TObj | dict[str, Any]) -> dict[str, Any]: + """ + Convert an object to a dictionary. + + If the object is already a dictionary, return it as is. + + Args: + input_obj: The object to convert. + + Returns: + The object as a dictionary. 
+ """ + return input_obj if isinstance(input_obj, dict) else input_obj.__dict__ + + +def assert_match_object(actual: TObj, expected: TObj | dict[str, Any]) -> None: + """ + Assert that two objects have the same attribute values. + + Args: + actual: The actual object. + expected: The expected object. + + Raises: + AssertionError: If the objects do not have the same attribute values. + """ + actual_attrs = actual.__dict__ + + expected_attrs = obj_to_dict(expected) + + for key, _ in actual_attrs.items(): + assert key in expected_attrs, f"Attribute {key} not found in expected object" + + if actual_attrs[key] != expected_attrs[key]: + raise_with_diff([{key: expected_attrs[key]}], [{key: actual_attrs[key]}]) + + +def assert_to_contain_object(actual: TObj, expected: TObj | dict[str, Any]) -> None: + """Assert that two objects have the same attribute values.""" + actual_attrs = actual.__dict__ + + if isinstance(expected, dict): + expected_attrs = expected + else: + expected_attrs = expected.__dict__ + + for key, _ in expected_attrs.items(): + if not isinstance(key, str): + continue + + assert key in actual_attrs, f"Attribute {key} not found in expected object" + + if actual_attrs[key] != expected_attrs[key]: + raise_with_diff([{key: expected_attrs[key]}], [{key: actual_attrs[key]}]) + + +def assert_object_lists_match( + actual: list[TObj], + expected: list[TObj | dict[str, Any]], +) -> None: + """Assert that two lists of objects have the same attribute values.""" + actual_dicts = [obj_to_dict(actual_obj) for actual_obj in actual] + expected_dicts = [obj_to_dict(expected_obj) for expected_obj in expected] + + actual_counter = Counter( + tuple(sorted(dict_entry.items())) for dict_entry in actual_dicts + ) + expected_counter = Counter( + tuple(sorted(dict_entry.items())) for dict_entry in expected_dicts + ) + if actual_counter != expected_counter: + raise_with_diff(expected_dicts, actual_dicts) + + +def raise_with_diff( + expected_dicts: Iterable[dict[str, Any]], + 
actual_dicts: Iterable[dict[str, Any]], +) -> None: + """ + Raise an AssertionError with a unified diff of the expected and actual values. + + Args: + expected: The expected value. + actual: The actual value. + """ + expected_str = [str(sorted(dict_entry.items())) for dict_entry in expected_dicts] + actual_str = [str(sorted(dict_entry.items())) for dict_entry in actual_dicts] + diff = difflib.unified_diff( + expected_str, + actual_str, + fromfile='expected', + tofile='actual', + lineterm='', + ) + diff_output = '\n'.join(diff) + raise AssertionError(f"Lists do not contain the same elements:\n{diff_output}") From 12d360b562dd66d2e8a22e4ccb207646522f8e14 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 29 Jul 2024 15:09:25 +0300 Subject: [PATCH 046/288] tests(configuration): add tests for configuration validations - Introduce tests to validate configuration node fields with URLs and dictionaries - Ensure deprecation warnings for outdated fields such as `timeout_seconds`, `master_node`, and `read_replica_nodes` - Test the validate_config_dict function for various scenarios, including missing and incorrect fields - Raise appropriate ConfigError exceptions for invalid configurations --- tests/configuration_validations_test.py | 201 ++++++++++++++++++++++++ 1 file changed, 201 insertions(+) create mode 100644 tests/configuration_validations_test.py diff --git a/tests/configuration_validations_test.py b/tests/configuration_validations_test.py new file mode 100644 index 0000000..d5be683 --- /dev/null +++ b/tests/configuration_validations_test.py @@ -0,0 +1,201 @@ +"""Tests for the ConfigurationValidations class.""" + +import types + +import pytest + +from typesense.configuration import ConfigDict, ConfigurationValidations +from typesense.exceptions import ConfigError + +DEFAULT_NODE = types.MappingProxyType( + {"host": "localhost", "port": 8108, "protocol": "http"}, +) + + +def test_validate_node_fields_with_url() -> None: + """Test validate_node_fields with a 
URL string.""" + assert ConfigurationValidations.validate_node_fields("http://localhost:8108/path") + + +def test_validate_node_fields_with_valid_dict() -> None: + """Test validate_node_fields with a valid dictionary.""" + assert ConfigurationValidations.validate_node_fields( + DEFAULT_NODE, + ) + + +def test_validate_node_fields_with_invalid_dict() -> None: + """Test validate_node_fields with an invalid dictionary.""" + assert not ConfigurationValidations.validate_node_fields( + { # type: ignore[arg-type] + "host": "localhost", + "port": 8108, + }, + ) + + +def test_deprecation_warning_timeout_seconds(caplog: pytest.LogCaptureFixture) -> None: + """Test that a deprecation warning is issued for the 'timeout_seconds' field.""" + config_dict: ConfigDict = { + "nodes": [DEFAULT_NODE], + "nearest_node": "http://localhost:8108", + "api_key": "xyz", + "timeout_seconds": 10, + } + ConfigurationValidations.show_deprecation_warnings(config_dict) + assert ( + ' '.join( + [ + "Deprecation warning: timeout_seconds is now renamed", + "to connection_timeout_seconds", + ], + ) + in caplog.text + ) + + +def test_deprecation_warning_master_node(caplog: pytest.LogCaptureFixture) -> None: + """Test that a deprecation warning is issued for the 'master_node' field.""" + config_dict: ConfigDict = { + "nodes": [DEFAULT_NODE], + "nearest_node": "http://localhost:8108", + "api_key": "xyz", + "master_node": "http://localhost:8108", + } + ConfigurationValidations.show_deprecation_warnings(config_dict) + assert ( + "Deprecation warning: master_node is now consolidated to nodes" in caplog.text + ) + + +def test_deprecation_warning_read_replica_nodes( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that a deprecation warning is issued for the 'read_replica_nodes' field.""" + config_dict: ConfigDict = { + "nodes": 
[DEFAULT_NODE], + "nearest_node": "http://localhost:8108", + "api_key": "xyz", + "read_replica_nodes": ["http://localhost:8109"], + } + ConfigurationValidations.show_deprecation_warnings(config_dict) + + assert ( + "Deprecation warning: read_replica_nodes is now consolidated to nodes" + ) in caplog.text + + +def test_validate_config_dict() -> None: + """Test validate_config_dict.""" + ConfigurationValidations.validate_config_dict( + { + "nodes": [ + { + "host": "localhost", + "port": 8108, + "protocol": "http", + }, + ], + "nearest_node": { + "host": "localhost", + "port": 8108, + "protocol": "http", + }, + "api_key": "xyz", + }, + ) + + +def test_validate_config_dict_with_string_nearest_node() -> None: + """Test validate_config_dict with nearest node as a string.""" + ConfigurationValidations.validate_config_dict( + { + "nodes": [ + { + "host": "localhost", + "port": 8108, + "protocol": "http", + }, + ], + "nearest_node": "http://localhost:8108", + "api_key": "xyz", + }, + ) + + +def test_validate_config_dict_with_string_nodes() -> None: + """Test validate_config_dict with nodes as a string.""" + ConfigurationValidations.validate_config_dict( + { + "nodes": "http://localhost:8108", + "nearest_node": "http://localhost:8108", + "api_key": "xyz", + }, + ) + + +def test_validate_config_dict_with_no_nodes() -> None: + """Test validate_config_dict with no nodes.""" + with pytest.raises(ConfigError, match="`nodes` is not defined."): + ConfigurationValidations.validate_config_dict( + { + "nearest_node": "http://localhost:8108", + "api_key": "xyz", + }, + ) + + +def test_validate_config_dict_with_no_api_key() -> None: + """Test validate_config_dict with no api_key.""" + with pytest.raises(ConfigError, match="`api_key` 
is not defined."): + ConfigurationValidations.validate_config_dict( + { + "nodes": [DEFAULT_NODE], + "nearest_node": "http://localhost:8108", + }, + ) + + +def test_validate_config_dict_with_wrong_node() -> None: + """Test validate_config_dict with wrong node.""" + with pytest.raises( + ConfigError, + match="`node` entry must be a URL string or a dictionary with the following required keys: host, port, protocol", # noqa: B950 + ): + ConfigurationValidations.validate_config_dict( + { + "nodes": [ + { + "host": "localhost", + "port": 8108, + "wrong_field": "invalid", + }, + ], + "api_key": "xyz", + }, + ) + + +def test_validate_config_dict_with_wrong_nearest_node() -> None: + """Test validate_config_dict with wrong nearest node.""" + with pytest.raises( + ConfigError, + match='`nearest_node` entry must be a URL string or a dictionary with the following required keys: host, port, protocol', # noqa: B950 + ): + ConfigurationValidations.validate_config_dict( + { + "nodes": [ + { + "host": "localhost", + "port": 8108, + "protocol": "http", + }, + ], + "nearest_node": { + "host": "localhost", + "port": 8108, + "wrong_field": "invalid", + }, + "api_key": "xyz", + }, + ) From af682770d7c32d7b3d940a4016f39823642478ea Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 29 Jul 2024 15:10:40 +0300 Subject: [PATCH 047/288] tests(configuration): add constructor tests for configurations - Test Configuration constructor for default, explicit, and various invalid configurations - Raise appropriate ConfigError exceptions for invalid configurations --- tests/configuration_test.py | 235 ++++++++++++++++++++++++++---------- 1 file changed, 168 insertions(+), 67 deletions(-) diff --git a/tests/configuration_test.py b/tests/configuration_test.py index 7818726..bebc091 100644 --- a/tests/configuration_test.py +++ b/tests/configuration_test.py @@ -1,83 +1,184 @@ +"""Tests for the Configuration class.""" + +import types + import pytest 
-from src.typesense.configuration import ConfigDict, Configuration +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from typesense.configuration import ConfigDict, Configuration, Node +from typesense.exceptions import ConfigError + +DEFAULT_NODE = types.MappingProxyType( + {"host": "localhost", "port": 8108, "protocol": "http"}, +) + + +def test_configuration_defaults() -> None: + """Test the Configuration constructor defaults.""" + config: ConfigDict = { + "nodes": [ + { + "host": "localhost", + "port": 8108, + "protocol": "http", + "path": "3", + }, + DEFAULT_NODE, + ], + "nearest_node": DEFAULT_NODE, + "api_key": "xyz", + } + + configuration = Configuration(config) + + nodes = [ + Node(host="localhost", port=8108, protocol="http", path=" "), + Node(host="localhost", port=8108, protocol="http", path="3"), + ] + nearest_node = Node(host="localhost", port=8108, protocol="http", path=" ") + assert_object_lists_match(configuration.nodes, nodes) + + assert_match_object(configuration.nearest_node, nearest_node) + + expected = { + "api_key": "xyz", + "connection_timeout_seconds": 3.0, + "num_retries": 3, + "retry_interval_seconds": 1.0, + "verify": True, + } -def test_validate_node_fields_with_url() -> None: - """Test validate_node_fields with a URL string.""" - assert Configuration.validate_node_fields("http://localhost:8108/path") + assert_to_contain_object(configuration, expected) -def test_validate_node_fields_with_valid_dict() -> None: - """Test validate_node_fields with a valid dictionary.""" - assert Configuration.validate_node_fields( - {"host": "localhost", "port": 8108, "protocol": "http"} - ) +def test_configuration_explicit() -> None: + """Test the Configuration constructor with explicit values.""" + config: ConfigDict = { + "nodes": [DEFAULT_NODE], + "nearest_node": DEFAULT_NODE, + "api_key": "xyz", + "connection_timeout_seconds": 5.0, 
+ "num_retries": 5, + "retry_interval_seconds": 2.0, + "verify": False, + } + configuration = Configuration(config) -def test_validate_node_fields_with_invalid_dict() -> None: - """Test validate_node_fields with an invalid dictionary.""" - assert not Configuration.validate_node_fields( - { # type: ignore[arg-type] - "host": "localhost", - "port": 8108, - } - ) + nodes = [Node(host="localhost", port=8108, protocol="http", path=" ")] + nearest_node = Node(host="localhost", port=8108, protocol="http", path=" ") + assert_object_lists_match(configuration.nodes, nodes) + assert_match_object(configuration.nearest_node, nearest_node) -def test_deprecation_warning_timeout_seconds(caplog: pytest.LogCaptureFixture) -> None: - """ - Test that a deprecation warning is issued for the 'timeout_seconds' field. - """ - config_dict: ConfigDict = { - "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], - "nearest_node": "http://localhost:8108", + expected = { "api_key": "xyz", - "timeout_seconds": 10, + "connection_timeout_seconds": 5.0, + "num_retries": 5, + "retry_interval_seconds": 2.0, + "verify": False, } - Configuration.show_deprecation_warnings(config_dict) - assert ( - ' '.join( - [ - "Deprecation warning: timeout_seconds is now renamed", - "to connection_timeout_seconds", - ] - ) - in caplog.text - ) - - -def test_deprecation_warning_master_node(caplog: pytest.LogCaptureFixture) -> None: - """ - Test that a deprecation warning is issued for the 'master_node' field. 
- """ - config_dict: ConfigDict = { - "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], - "nearest_node": "http://localhost:8108", + + assert_to_contain_object(configuration, expected) + + +def test_configuration_no_nearest_node() -> None: + """Test the Configuration constructor with no nearest node.""" + config: ConfigDict = { + "nodes": [DEFAULT_NODE], "api_key": "xyz", - "master_node": "http://localhost:8108", } - Configuration.show_deprecation_warnings(config_dict) - assert ( - "Deprecation warning: master_node is now consolidated to nodes" in caplog.text - ) - - -@pytest.mark.filterwarnings("ignore:Deprecation warning") -def test_deprecation_warning_read_replica_nodes( - caplog: pytest.LogCaptureFixture, -) -> None: - """ - Test that a deprecation warning is issued for the 'read_replica_nodes' field. - """ - config_dict: ConfigDict = { - "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], - "nearest_node": "http://localhost:8108", + + configuration = Configuration(config) + + nodes = Node(host="localhost", port=8108, protocol="http", path=" ") + + for node in configuration.nodes: + assert_match_object(node, nodes) + + expected = { "api_key": "xyz", - "read_replica_nodes": ["http://localhost:8109"], + "connection_timeout_seconds": 3.0, + "num_retries": 3, + "retry_interval_seconds": 1.0, + "verify": True, + "nearest_node": None, } - Configuration.show_deprecation_warnings(config_dict) - assert ( - "Deprecation warning: read_replica_nodes is now consolidated to nodes" - in caplog.text - ) + assert_to_contain_object(configuration, expected) + + +def test_configuration_empty_nodes() -> None: + """Test the Configuration constructor with empty nodes.""" + config: ConfigDict = { + "nodes": [], + "api_key": "xyz", + } + + with pytest.raises( + ConfigError, + 
match="`nodes` is not defined.", # noqa: B950 + ): + Configuration(config) + + +def test_configuration_invalid_node() -> None: + """Test the Configuration constructor with an invalid node.""" + config: ConfigDict = { + "nodes": [{"host": "localhost"}], + "api_key": "xyz", + } + + with pytest.raises( + ConfigError, + match="`node` entry must be a URL string or a dictionary with the following required keys: host, port, protocol", # noqa: B950 + ): + Configuration(config) + + +def test_configuration_invalid_node_url() -> None: + """Test the Configuration constructor with an invalid node as a url.""" + config: ConfigDict = { + "nodes": ["http://localhost"], + "api_key": "xyz", + } + + with pytest.raises( + ConfigError, + match="Node URL does not contain the port.", + ): + Configuration(config) + + +def test_configuration_invalid_nearest_node() -> None: + """Test the Configuration constructor with an invalid nearest node.""" + config: ConfigDict = { + "nodes": [DEFAULT_NODE], + "nearest_node": {"host": "localhost"}, + "api_key": "xyz", + } + + with pytest.raises( + ConfigError, + match="`nearest_node` entry must be a URL string or a dictionary with the following required keys: host, port, protocol", # noqa: B950 + ): + Configuration(config) + + +def test_configuration_invalid_nearest_node_url() -> None: + """Test the Configuration constructor with an invalid nearest node as a url.""" + config: ConfigDict = { + "nodes": [DEFAULT_NODE], + "nearest_node": "http://localhost", + "api_key": "xyz", + } + + with pytest.raises( + ConfigError, + match="Node URL does not contain the port.", + ): + Configuration(config) From 211345ecb7e0a0f003db5a05da0c1fcf43a25f1a Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 30 Jul 2024 10:55:30 +0300 Subject: [PATCH 048/288] refactor(tests): use object assertion util functions for node tests --- tests/node_test.py | 28 
+++++++++++++++++++--------- 1 file changed, 19 insertions(+), 9 deletions(-) diff --git a/tests/node_test.py b/tests/node_test.py index b186833..6b365a6 100644 --- a/tests/node_test.py +++ b/tests/node_test.py @@ -2,6 +2,7 @@ import pytest +from tests.utils.object_assertions import assert_match_object, assert_to_contain_object from typesense.configuration import Node from typesense.exceptions import ConfigError @@ -9,20 +10,29 @@ def test_node_initialization() -> None: """Test the initialization of the Node class using an object.""" node = Node(host="localhost", port=8108, path="/path", protocol="http") - assert node.host == "localhost" - assert node.port == 8108 - assert node.path == "/path" - assert node.protocol == "http" - assert node.healthy is True + + expected = { + "host": "localhost", + "port": 8108, + "path": "/path", + "protocol": "http", + "healthy": True, + } + assert_match_object(node, expected) def test_node_from_url() -> None: """Test the initialization of the Node class using a URL.""" node = Node.from_url("http://localhost:8108/path") - assert node.host == "localhost" - assert node.port == 8108 - assert node.path == "/path" - assert node.protocol == "http" + + expected = { + "host": "localhost", + "port": 8108, + "path": "/path", + "protocol": "http", + "healthy": True, + } + assert_match_object(node, expected) def test_node_from_url_missing_hostname() -> None: From 839d1a953b685ff57ea375723193a73d63f080e0 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 30 Jul 2024 10:57:06 +0300 Subject: [PATCH 049/288] feat(node): add last access timestamp to node class - Introduce last_access_ts attribute to Node class for tracking accesstime. - Initialize it to the current time's UNIX timestamp. 
tests(node): update node tests to include last access timestamp --- src/typesense/configuration.py | 4 ++++ tests/node_test.py | 6 ++++++ 2 files changed, 10 insertions(+) diff --git a/src/typesense/configuration.py b/src/typesense/configuration.py index bd5d970..2385545 100644 --- a/src/typesense/configuration.py +++ b/src/typesense/configuration.py @@ -16,6 +16,7 @@ from __future__ import annotations +import time from typing import Literal, NotRequired, TypedDict, Union from urllib.parse import urlparse @@ -119,6 +120,9 @@ def __init__( # Used to skip bad hosts self.healthy = True + # Used to track the last time this node was accessed + self.last_access_ts: int = int(time.time()) + @classmethod def from_url(cls, url: str) -> 'Node': """ diff --git a/tests/node_test.py b/tests/node_test.py index 6b365a6..2cfac4f 100644 --- a/tests/node_test.py +++ b/tests/node_test.py @@ -1,5 +1,7 @@ """Tests for the Node class.""" +import time + import pytest from tests.utils.object_assertions import assert_match_object, assert_to_contain_object @@ -11,12 +13,14 @@ def test_node_initialization() -> None: """Test the initialization of the Node class using an object.""" node = Node(host="localhost", port=8108, path="/path", protocol="http") + current_time = int(time.time()) expected = { "host": "localhost", "port": 8108, "path": "/path", "protocol": "http", "healthy": True, + "last_access_ts": current_time, } assert_match_object(node, expected) @@ -25,12 +29,14 @@ def test_node_from_url() -> None: """Test the initialization of the Node class using a URL.""" node = Node.from_url("http://localhost:8108/path") + current_time = int(time.time()) expected = { "host": "localhost", "port": 8108, "path": "/path", "protocol": "http", "healthy": True, + "last_access_ts": current_time, } assert_match_object(node, expected) From 111118dda9014dc4fe029db37c93b4b106bf785e Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 31 Jul 2024 20:04:21 
+0300 Subject: [PATCH 050/288] feat(api_call): add type hints to api call class - Enhance type safety in `ApiCall` with type annotations - Update method return types and parameters --- src/typesense/api_call.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index 8b49e32..b5c9592 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -7,35 +7,36 @@ ObjectNotFound, ObjectUnprocessable, RequestMalformed, RequestUnauthorized, RequestForbidden, ServerError, ServiceUnavailable, TypesenseClientError) +from typesense.configuration import Configuration, Node from .logger import logger session = requests.session() class ApiCall(object): API_KEY_HEADER_NAME = 'X-TYPESENSE-API-KEY' - def __init__(self, config): + def __init__(self, config: Configuration): self.config = config self.nodes = copy.deepcopy(self.config.nodes) self.node_index = 0 self._initialize_nodes() - def _initialize_nodes(self): + def _initialize_nodes(self) -> None: if self.config.nearest_node: self.set_node_healthcheck(self.config.nearest_node, True) for node in self.nodes: self.set_node_healthcheck(node, True) - def node_due_for_health_check(self, node): + def node_due_for_health_check(self, node: Node) -> bool: current_epoch_ts = int(time.time()) - due_for_check = (current_epoch_ts - node.last_access_ts) > self.config.healthcheck_interval_seconds + due_for_check: bool = (current_epoch_ts - node.last_access_ts) > self.config.healthcheck_interval_seconds if due_for_check: logger.debug('Node {}:{} is due for health check.'.format(node.host, node.port)) return due_for_check # Returns a healthy host from the pool in a round-robin fashion. # Might return an unhealthy host periodically to check for recovery. 
- def get_node(self): + def get_node(self) -> Node: if self.config.nearest_node: if self.config.nearest_node.healthy or self.node_due_for_health_check(self.config.nearest_node): logger.debug('Using nearest node.') @@ -58,7 +59,7 @@ def get_node(self): return self.nodes[self.node_index] @staticmethod - def get_exception(http_code): + def get_exception(http_code: int) -> type[TypesenseClientError]: if http_code == 0: return HTTPStatus0Error elif http_code == 400: @@ -129,7 +130,7 @@ def make_request(self, fn, endpoint, as_json, **kwargs): logger.debug('No retries left. Raising last exception: {}'.format(last_exception)) raise last_exception - def set_node_healthcheck(self, node, is_healthy): + def set_node_healthcheck(self, node: Node, is_healthy: bool) -> None: node.healthy = is_healthy node.last_access_ts = int(time.time()) @@ -153,7 +154,6 @@ def post(self, endpoint, body, params=None, as_json=True): return self.make_request(session.post, endpoint, as_json, params=params, data=body, timeout=self.config.connection_timeout_seconds, verify=self.config.verify) - def put(self, endpoint, body, params=None): return self.make_request(session.put, endpoint, True, params=params, data=body, From c914cedfd5dd226f1bde4332279d7820240c326b Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 31 Jul 2024 20:10:52 +0300 Subject: [PATCH 051/288] feat(api_call): add generics to class definition and methods - Add three generic variables to ensure correct type inference on `ApiCall` users - Add `TypedDict` for request fucntion keyword arguments - Add overloads for methods returning either strings or json from request response, to ensure correct types are inferred when called --- src/typesense/api_call.py | 117 ++++++++++++++++++++++++++++++++++---- 1 file changed, 106 insertions(+), 11 deletions(-) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index b5c9592..0097fc9 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -1,6 +1,17 @@ 
import copy import json import time +from typing import ( + Any, + Callable, + Generic, + Literal, + NotRequired, + TypedDict, + TypeVar, + Unpack, + overload, +) import requests from .exceptions import (HTTPStatus0Error, ObjectAlreadyExists, @@ -10,8 +21,19 @@ from typesense.configuration import Configuration, Node from .logger import logger session = requests.session() +TParams = TypeVar('TParams', bound=dict[str, Any]) +TBody = TypeVar('TBody', bound=dict[str, Any]) +TEntityDict = TypeVar('TEntityDict') -class ApiCall(object): + +class SessionFunctionKwargs(Generic[TParams, TBody], TypedDict): + params: NotRequired[TParams | None] + data: NotRequired[TBody | str] + timeout: float + verify: bool + + +class ApiCall(Generic[TEntityDict, TParams, TBody]): API_KEY_HEADER_NAME = 'X-TYPESENSE-API-KEY' def __init__(self, config: Configuration): @@ -81,8 +103,32 @@ def get_exception(http_code: int) -> type[TypesenseClientError]: else: return TypesenseClientError + @overload + def make_request( + self, + fn: Callable[..., requests.models.Response], + endpoint: str, + as_json: Literal[True], + **kwargs: Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> TEntityDict: ... + + @overload + def make_request( + self, + fn: Callable[..., requests.models.Response], + endpoint: str, + as_json: Literal[False], + **kwargs: Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> str: ... 
+ # Makes the actual http request, along with retries - def make_request(self, fn, endpoint, as_json, **kwargs): + def make_request( + self, + fn: Callable[..., requests.models.Response], + endpoint: str, + as_json: bool, + **kwargs: Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> TEntityDict | str: num_tries = 0 last_exception = None @@ -135,36 +181,85 @@ def set_node_healthcheck(self, node: Node, is_healthy: bool) -> None: node.last_access_ts = int(time.time()) @staticmethod - def normalize_params(params): + def normalize_params(params: TParams) -> None: for key in params.keys(): if isinstance(params[key], bool) and params[key]: params[key] = 'true' elif isinstance(params[key], bool) and not params[key]: params[key] = 'false' - def get(self, endpoint, params=None, as_json=True): params = params or {} return self.make_request(session.get, endpoint, as_json, params=params, timeout=self.config.connection_timeout_seconds, verify=self.config.verify) - - def post(self, endpoint, body, params=None, as_json=True): params = params or {} ApiCall.normalize_params(params) return self.make_request(session.post, endpoint, as_json, params=params, data=body, timeout=self.config.connection_timeout_seconds, verify=self.config.verify) - def put(self, endpoint, body, params=None): return self.make_request(session.put, endpoint, True, params=params, data=body, timeout=self.config.connection_timeout_seconds, verify=self.config.verify) - - def patch(self, endpoint, body, params=None): return self.make_request(session.patch, endpoint, True, params=params, data=body, timeout=self.config.connection_timeout_seconds, verify=self.config.verify) - - def delete(self, endpoint, params=None): return self.make_request(session.delete, endpoint, True, params=params, timeout=self.config.connection_timeout_seconds, verify=self.config.verify) + @overload + def get( + self, endpoint: str, as_json: Literal[False], params: TParams | None = None + ) -> str: ... 
+ + @overload + def get( + self, endpoint: str, as_json: Literal[True], params: TParams | None = None + ) -> TEntityDict: ... + + def get( + self, + endpoint: str, + as_json: Literal[True] | Literal[False] = True, + params: TParams | None = None, + ) -> TEntityDict | str: + @overload + def post( + self, + endpoint: str, + body: TBody, + as_json: Literal[False], + params: TParams | None = None, + ) -> str: ... + + @overload + def post( + self, + endpoint: str, + body: TBody, + as_json: Literal[True], + params: TParams | None = None, + ) -> TEntityDict: ... + + def post( + self, + endpoint: str, + body: TBody, + as_json: Literal[True, False], + params: TParams | None = None, + ) -> str | TEntityDict: + + def put( + self, + endpoint: str, + body: TBody, + params: TParams | None = None, + ) -> TEntityDict: + + def patch( + self, + endpoint: str, + body: TBody, + params: TParams | None = None, + ) -> TEntityDict: + + def delete(self, endpoint: str, params: TParams | None = None) -> TEntityDict: From 47ab8c0031e43cb5362de7a56d27d9fb117c73c5 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 31 Jul 2024 20:14:58 +0300 Subject: [PATCH 052/288] fix(api_call): fix type of exception raised if max retries are reached - Correct type annotation for `last_exception` in `make_request` method to `BaseException`. 
This ensures accurate exception handling and resolves type errors, as `None` cannot be raised --- src/typesense/api_call.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index 0097fc9..3d4b7eb 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -130,7 +130,7 @@ def make_request( **kwargs: Unpack[SessionFunctionKwargs[TParams, TBody]], ) -> TEntityDict | str: num_tries = 0 - last_exception = None + last_exception: BaseException logger.debug('Making {} {}'.format(fn.__name__, endpoint)) From 587c413a150807685408b249d8944c1396f42905 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 31 Jul 2024 20:20:17 +0300 Subject: [PATCH 053/288] refactor(api_call): refactor falsy parameters in requests - In python, an empty dictionary `{}` won't get passed as an empty dictionary in the function call, and its behavior is the same as passing `None`. - It previously inferred the type of dict[Never, Never] and served no purpose. 
--- src/typesense/api_call.py | 65 ++++++++++++++++++++++++++++----------- 1 file changed, 47 insertions(+), 18 deletions(-) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index 3d4b7eb..69f15e1 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -188,24 +188,6 @@ def normalize_params(params: TParams) -> None: elif isinstance(params[key], bool) and not params[key]: params[key] = 'false' - params = params or {} - return self.make_request(session.get, endpoint, as_json, - params=params, - timeout=self.config.connection_timeout_seconds, verify=self.config.verify) - params = params or {} - ApiCall.normalize_params(params) - return self.make_request(session.post, endpoint, as_json, - params=params, data=body, - timeout=self.config.connection_timeout_seconds, verify=self.config.verify) - return self.make_request(session.put, endpoint, True, - params=params, data=body, - timeout=self.config.connection_timeout_seconds, verify=self.config.verify) - return self.make_request(session.patch, endpoint, True, - params=params, data=body, - timeout=self.config.connection_timeout_seconds, verify=self.config.verify) - return self.make_request(session.delete, endpoint, True, - params=params, timeout=self.config.connection_timeout_seconds, - verify=self.config.verify) @overload def get( self, endpoint: str, as_json: Literal[False], params: TParams | None = None @@ -222,6 +204,15 @@ def get( as_json: Literal[True] | Literal[False] = True, params: TParams | None = None, ) -> TEntityDict | str: + return self.make_request( + session.get, + endpoint, + as_json=as_json, + params=params, + timeout=self.config.connection_timeout_seconds, + verify=self.config.verify, + ) + @overload def post( self, @@ -247,6 +238,17 @@ def post( as_json: Literal[True, False], params: TParams | None = None, ) -> str | TEntityDict: + if params: + ApiCall.normalize_params(params) + return self.make_request( + session.post, + endpoint, + as_json=as_json, + params=params, + 
data=body, + timeout=self.config.connection_timeout_seconds, + verify=self.config.verify, + ) def put( self, @@ -254,6 +256,15 @@ def put( body: TBody, params: TParams | None = None, ) -> TEntityDict: + return self.make_request( + session.put, + endpoint, + True, + params=params, + data=body, + timeout=self.config.connection_timeout_seconds, + verify=self.config.verify, + ) def patch( self, @@ -261,5 +272,23 @@ def patch( body: TBody, params: TParams | None = None, ) -> TEntityDict: + return self.make_request( + session.patch, + endpoint, + True, + params=params, + data=body, + timeout=self.config.connection_timeout_seconds, + verify=self.config.verify, + ) def delete(self, endpoint: str, params: TParams | None = None) -> TEntityDict: + + return self.make_request( + session.delete, + endpoint, + True, + params=params, + timeout=self.config.connection_timeout_seconds, + verify=self.config.verify, + ) From 96a02bc0926a147a732f46525e347815761a8587 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 31 Jul 2024 20:22:12 +0300 Subject: [PATCH 054/288] fix(node): use empty string for an empty `path` parameter - If used with a space, it breaks urls, adding a space between the parts e.g. 
`https://localhost:8108/ test/' --- src/typesense/configuration.py | 2 +- tests/configuration_test.py | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/typesense/configuration.py b/src/typesense/configuration.py index 2385545..90cce9b 100644 --- a/src/typesense/configuration.py +++ b/src/typesense/configuration.py @@ -242,7 +242,7 @@ def _initialize_nodes( return Node( node['host'], node['port'], - node.get('path', ' '), + node.get('path', ''), node['protocol'], ) diff --git a/tests/configuration_test.py b/tests/configuration_test.py index bebc091..120888f 100644 --- a/tests/configuration_test.py +++ b/tests/configuration_test.py @@ -36,10 +36,10 @@ def test_configuration_defaults() -> None: configuration = Configuration(config) nodes = [ - Node(host="localhost", port=8108, protocol="http", path=" "), + Node(host="localhost", port=8108, protocol="http", path=""), Node(host="localhost", port=8108, protocol="http", path="3"), ] - nearest_node = Node(host="localhost", port=8108, protocol="http", path=" ") + nearest_node = Node(host="localhost", port=8108, protocol="http", path="") assert_object_lists_match(configuration.nodes, nodes) @@ -70,8 +70,8 @@ def test_configuration_explicit() -> None: configuration = Configuration(config) - nodes = [Node(host="localhost", port=8108, protocol="http", path=" ")] - nearest_node = Node(host="localhost", port=8108, protocol="http", path=" ") + nodes = [Node(host="localhost", port=8108, protocol="http", path="")] + nearest_node = Node(host="localhost", port=8108, protocol="http", path="") assert_object_lists_match(configuration.nodes, nodes) assert_match_object(configuration.nearest_node, nearest_node) @@ -96,7 +96,7 @@ def test_configuration_no_nearest_node() -> None: configuration = Configuration(config) - nodes = Node(host="localhost", port=8108, protocol="http", path=" ") + nodes = Node(host="localhost", port=8108, protocol="http", path="") for node in 
configuration.nodes: assert_match_object(node, nodes) From 87ab579b8ef5b8bd21a3e61d423f9a3673f01d5c Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 31 Jul 2024 20:24:42 +0300 Subject: [PATCH 055/288] test(api_call): add unit tests for api call class - Implement tests for initialization, node selection, and request methods - Cover error handling, parameter normalization, and retry logic - Ensure proper behavior with nearest node and round-robin selection - Test various HTTP methods (GET, POST, PUT, PATCH, DELETE) - Ensure 99% code coverage on the test cases --- tests/api_call_test.py | 482 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 482 insertions(+) create mode 100644 tests/api_call_test.py diff --git a/tests/api_call_test.py b/tests/api_call_test.py new file mode 100644 index 0000000..8ce0229 --- /dev/null +++ b/tests/api_call_test.py @@ -0,0 +1,482 @@ +"""Unit Tests for the ApiCall class.""" + +import time + +import pytest +import requests +import requests_mock +from pytest_mock import MockerFixture + +from tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from typesense import exceptions +from typesense.api_call import ApiCall +from typesense.configuration import Configuration, Node + + +@pytest.fixture(scope="function", name="config") +def config_fixture() -> Configuration: + """Return a Configuration object with test values.""" + return Configuration( + config_dict={ + "api_key": "test-api-key", + "nodes": [ + { + "host": "node0", + "port": 8108, + "protocol": "http", + }, + { + "host": "node1", + "port": 8108, + "protocol": "http", + }, + { + "host": "node2", + "port": 8108, + "protocol": "http", + }, + ], + "nearest_node": { + "host": "nearest", + "port": 8108, + "protocol": "http", + }, + "num_retries": 3, + "healthcheck_interval_seconds": 60, + "retry_interval_seconds": 0.001, + "connection_timeout_seconds": 0.001, + "verify": True, + }, + ) + + +@pytest.fixture(scope="function", 
name="api_call") +def api_call_fixture( + config: Configuration, +) -> ApiCall[dict[str, str], dict[str, str], dict[str, str]]: + """Return an ApiCall object with test values.""" + return ApiCall[dict[str, str], dict[str, str], dict[str, str]](config) + + +def test_initialization( + api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], + config: Configuration, +) -> None: + """Test the initialization of the ApiCall object.""" + assert api_call.config == config + assert_object_lists_match(api_call.nodes, config.nodes) + assert api_call.node_index == 0 + + +def test_node_due_for_health_check( + api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], +) -> None: + """Test that it correctly identifies if a node is due for health check.""" + node = Node(host="localhost", port=8108, protocol="http", path=" ") + node.last_access_ts = time.time() - 61 + assert api_call.node_due_for_health_check(node) is True + + +def test_get_node_nearest_healthy( + api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], +) -> None: + """Test that it correctly selects the nearest node if it is healthy.""" + node = api_call.get_node() + assert_match_object(node, api_call.config.nearest_node) + + +def test_get_node_nearest_not_healthy( + api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], +) -> None: + """Test that it selects the next available node if the nearest node is not healthy.""" + api_call.config.nearest_node.healthy = False + node = api_call.get_node() + assert_match_object(node, api_call.nodes[0]) + + +def test_get_node_round_robin_selection( + api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], + mocker: MockerFixture, +) -> None: + """Test that it selects the next available node in a round-robin fashion.""" + api_call.config.nearest_node = None + mocker.patch("time.time", return_value=100) + + node1 = api_call.get_node() + assert_match_object(node1, api_call.config.nodes[0]) + + node2 = api_call.get_node() + 
assert_match_object(node2, api_call.config.nodes[1]) + + node3 = api_call.get_node() + assert_match_object(node3, api_call.config.nodes[2]) + + +def test_get_exception() -> None: + """Test that it correctly returns the exception class for a given status code.""" + assert ApiCall.get_exception(0) == exceptions.HTTPStatus0Error + assert ApiCall.get_exception(400) == exceptions.RequestMalformed + assert ApiCall.get_exception(401) == exceptions.RequestUnauthorized + assert ApiCall.get_exception(403) == exceptions.RequestForbidden + assert ApiCall.get_exception(404) == exceptions.ObjectNotFound + assert ApiCall.get_exception(409) == exceptions.ObjectAlreadyExists + assert ApiCall.get_exception(422) == exceptions.ObjectUnprocessable + assert ApiCall.get_exception(500) == exceptions.ServerError + assert ApiCall.get_exception(503) == exceptions.ServiceUnavailable + assert ApiCall.get_exception(999) == exceptions.TypesenseClientError + + +def test_normalize_params_with_booleans() -> None: + """Test that it correctly normalizes boolean values to strings.""" + parameter_dict: dict[str, str | bool] = {"key1": True, "key2": False} + ApiCall.normalize_params(parameter_dict) + + assert parameter_dict == {"key1": "true", "key2": "false"} + + +def test_normalize_params_with_mixed_types() -> None: + """Test that it correctly normalizes boolean values to strings.""" + parameter_dict = {"key1": True, "key2": False, "key3": "value", "key4": 123} + ApiCall.normalize_params(parameter_dict) + assert parameter_dict == { + "key1": "true", + "key2": "false", + "key3": "value", + "key4": 123, + } + + +def test_normalize_params_with_empty_dict() -> None: + """Test that it correctly normalizes an empty dictionary.""" + parameter_dict: dict[str, str] = {} + ApiCall.normalize_params(parameter_dict) + assert not parameter_dict + + +def test_normalize_params_with_no_booleans() -> None: + """Test that it correctly normalizes a dictionary with no boolean values.""" + parameter_dict = {"key1": 
"value", "key2": 123} + ApiCall.normalize_params(parameter_dict) + assert parameter_dict == {"key1": "value", "key2": 123} + + +def test_make_request_as_json(api_call: ApiCall) -> None: + """Test the `make_request` method with JSON response.""" + session = requests.sessions.Session() + + with requests_mock.mock(session=session) as request_mocker: + request_mocker.get( + "http://nearest:8108/test", + json={"key": "value"}, + status_code=200, + ) + + response = api_call.make_request(session.get, "/test", as_json=True) + assert response == {"key": "value"} + + +def test_make_request_as_text(api_call: ApiCall) -> None: + """Test the `make_request` method with text response.""" + session = requests.sessions.Session() + + with requests_mock.mock(session=session) as request_mocker: + request_mocker.get( + "http://nearest:8108/test", + text="response text", + status_code=200, + ) + + response = api_call.make_request(session.get, "/test", as_json=False) + assert response == "response text" + + +def test_get_as_json( + api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], +) -> None: + """Test the GET method with JSON response.""" + with requests_mock.mock() as request_mocker: + request_mocker.get( + "http://nearest:8108/test", + json={"key": "value"}, + status_code=200, + ) + assert api_call.get("/test", as_json=True) == {"key": "value"} + + +def test_get_as_text( + api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], +) -> None: + """Test the GET method with text response.""" + with requests_mock.mock() as request_mocker: + request_mocker.get( + "http://nearest:8108/test", + text="response text", + status_code=200, + ) + assert api_call.get("/test", as_json=False) == "response text" + + +def test_post_as_json( + api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], 
+) -> None: + """Test the POST method with JSON response.""" + with requests_mock.mock() as request_mocker: + request_mocker.post( + "http://nearest:8108/test", + json={"key": "value"}, + status_code=200, + ) + assert api_call.post("/test", body={"data": "value"}, as_json=True) == { + "key": "value", + } + + +def test_post_with_params( + api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], +) -> None: + """Test that the parameters are correctly passed to the request.""" + with requests_mock.Mocker() as request_mocker: + request_mocker.post( + "http://nearest:8108/test", + json={"key": "value"}, + status_code=200, + ) + + parameter_set = {"key1": [True, False], "key2": False, "key3": "value"} + + post_result = api_call.post( + "/test", + params=parameter_set, + body={"key": "value"}, + as_json=True, + ) + + expected_parameter_set = { + "key1": ["true", "false"], + "key2": ["false"], + "key3": ["value"], + } + + request = request_mocker.request_history[0] + + assert request.qs == expected_parameter_set + assert post_result == {"key": "value"} + + +def test_post_as_text( + api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], +) -> None: + """Test the POST method with text response.""" + with requests_mock.mock() as request_mocker: + request_mocker.post( + "http://nearest:8108/test", + text="response text", + status_code=200, + ) + post_result = api_call.post("/test", body={"data": "value"}, as_json=False) + assert post_result == "response text" + + +def test_put_as_json( + api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], +) -> None: + """Test the PUT method with JSON response.""" + with requests_mock.mock() as request_mocker: + request_mocker.put( + "http://nearest:8108/test", + json={"key": "value"}, + status_code=200, + ) + assert api_call.put("/test", 
body={"data": "value"}) == {"key": "value"} + + +def test_patch_as_json( + api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], +) -> None: + """Test the PATCH method with JSON response.""" + with requests_mock.mock() as request_mocker: + request_mocker.patch( + "http://nearest:8108/test", + json={"key": "value"}, + status_code=200, + ) + assert api_call.patch("/test", body={"data": "value"}) == {"key": "value"} + + +def test_delete_as_json( + api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], +) -> None: + """Test the DELETE method with JSON response.""" + with requests_mock.mock() as request_mocker: + request_mocker.delete( + "http://nearest:8108/test", + json={"key": "value"}, + status_code=200, + ) + + response = api_call.delete("/test") + assert response == {"key": "value"} + + +def test_raise_custom_exception_with_header( + api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], +) -> None: + """Test that it raises a custom exception with the error message.""" + with requests_mock.mock() as request_mocker: + request_mocker.get( + "http://nearest:8108/test", + json={"message": "Test error"}, + status_code=400, + headers={"Content-Type": "application/json"}, + ) + + with pytest.raises(exceptions.RequestMalformed) as exception: + api_call.make_request(requests.get, "/test", as_json=True) + assert str(exception.value) == "[Errno 400] Test error" + + +def test_raise_custom_exception_without_header( + api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], +) -> None: + """Test that it raises a custom exception with the error message.""" + with requests_mock.mock() as request_mocker: + request_mocker.get( + "http://nearest:8108/test", + json={"message": "Test error"}, + status_code=400, + ) + + with pytest.raises(exceptions.RequestMalformed) as 
exception: + api_call.make_request(requests.get, "/test", as_json=True) + assert str(exception.value) == "[Errno 400] API error." + + +def test_selects_next_available_node_on_timeout( + api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], +) -> None: + """Test that it selects the next available node if the request times out.""" + with requests_mock.mock() as request_mocker: + api_call.config.nearest_node = None + request_mocker.get( + "http://node0:8108/test", + exc=requests.exceptions.ConnectTimeout, + ) + request_mocker.get( + "http://node1:8108/test", + exc=requests.exceptions.ConnectTimeout, + ) + request_mocker.get( + "http://node2:8108/test", + json={"key": "value"}, + status_code=200, + ) + + response = api_call.get("/test", as_json=True) + + assert response == {"key": "value"} + assert request_mocker.request_history[0].url == "http://node0:8108/test" + assert request_mocker.request_history[1].url == "http://node1:8108/test" + assert request_mocker.request_history[2].url == "http://node2:8108/test" + assert request_mocker.call_count == 3 + + +def test_raises_if_no_nodes_are_healthy_with_the_last_exception( + api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], +) -> None: + """Test that it raises the last exception if no nodes are healthy.""" + with requests_mock.mock() as request_mocker: + request_mocker.get( + "http://nearest:8108/", + exc=requests.exceptions.ConnectTimeout, + ) + request_mocker.get("http://node0:8108/", exc=requests.exceptions.ConnectTimeout) + request_mocker.get("http://node1:8108/", exc=requests.exceptions.ConnectTimeout) + 
request_mocker.get("http://node2:8108/", exc=requests.exceptions.SSLError) + + with pytest.raises(requests.exceptions.SSLError): + api_call.get("/") + + +def test_uses_nearest_node_if_present_and_healthy( + mocker: MockerFixture, + api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], +) -> None: + """Test that it uses the nearest node if it is present and healthy.""" + with requests_mock.Mocker() as request_mocker: + request_mocker.get( + "http://nearest:8108/", + exc=requests.exceptions.ConnectTimeout, + ) + request_mocker.get("http://node0:8108/", exc=requests.exceptions.ConnectTimeout) + request_mocker.get("http://node1:8108/", exc=requests.exceptions.ConnectTimeout) + request_mocker.get( + "http://node2:8108/", + json={"message": "Success"}, + status_code=200, + ) + + # Freeze time + current_time = time.time() + mocker.patch("time.time", return_value=current_time) + + # Perform the requests + + # 1 should go to nearest, + # 2 should go to node0, + # 3 should go to node1, + # 4 should go to node2 and resolve the request: 4 requests + api_call.get("/") + # 1 should go to node2 and resolve the request: 1 request + api_call.get("/") + # 1 should go to node2 and resolve the request: 1 request + api_call.get("/") + + # Advance time by 5 seconds + mocker.patch("time.time", return_value=current_time + 5) + api_call.get("/") # 1 should go to node2 and resolve the request: 1 request + + # Advance time by 65 seconds + mocker.patch("time.time", return_value=current_time + 65) + + # 1 should go to nearest, + # 2 should go to node0, + # 3 should go to node1, + # 4 should go to node2 and resolve the request: 4 requests + api_call.get("/") + + # Advance time by 185 seconds + mocker.patch("time.time", return_value=current_time + 185) + + # Resolve the 
request on the nearest node + request_mocker.get( + "http://nearest:8108/", + json={"message": "Success"}, + status_code=200, + ) + + # 1 should go to nearest and resolve the request: 1 request + api_call.get("/") + # 1 should go to nearest and resolve the request: 1 request + api_call.get("/") + # 1 should go to nearest and resolve the request: 1 request + api_call.get("/") + + # Check the request history + assert request_mocker.request_history[0].url == "http://nearest:8108/" + assert request_mocker.request_history[1].url == "http://node0:8108/" + assert request_mocker.request_history[2].url == "http://node1:8108/" + assert request_mocker.request_history[3].url == "http://node2:8108/" + + assert request_mocker.request_history[4].url == "http://node2:8108/" + assert request_mocker.request_history[5].url == "http://node2:8108/" + + assert request_mocker.request_history[6].url == "http://node2:8108/" + + assert request_mocker.request_history[7].url == "http://nearest:8108/" + assert request_mocker.request_history[8].url == "http://node0:8108/" + assert request_mocker.request_history[9].url == "http://node1:8108/" + assert request_mocker.request_history[10].url == "http://node2:8108/" + + assert request_mocker.request_history[11].url == "http://nearest:8108/" + assert request_mocker.request_history[12].url == "http://nearest:8108/" + assert 
request_mocker.request_history[13].url == "http://nearest:8108/" From b3b9baf56dd87b82c3b9a89d3ed7e30348dd72b4 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Thu, 1 Aug 2024 13:22:16 +0300 Subject: [PATCH 056/288] fix(typing): use typing extensions package on python versions under 3.11 --- src/typesense/api_call.py | 103 +++++++++++++++++++-------------- src/typesense/configuration.py | 62 +++++++++++--------- tests/api_call_test.py | 47 ++++++++------- 3 files changed, 118 insertions(+), 94 deletions(-) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index 69f15e1..b9e7ea0 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -1,17 +1,10 @@ + +from __future__ import annotations + import copy import json +import sys import time -from typing import ( - Any, - Callable, - Generic, - Literal, - NotRequired, - TypedDict, - TypeVar, - Unpack, - overload, -) import requests from .exceptions import (HTTPStatus0Error, ObjectAlreadyExists, @@ -21,20 +14,34 @@ from typesense.configuration import Configuration, Node from .logger import logger session = requests.session() -TParams = TypeVar('TParams', bound=dict[str, Any]) -TBody = TypeVar('TBody', bound=dict[str, Any]) -TEntityDict = TypeVar('TEntityDict') - -class SessionFunctionKwargs(Generic[TParams, TBody], TypedDict): - params: NotRequired[TParams | None] - data: NotRequired[TBody | str] +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +TParams = typing.TypeVar("TParams", bound=typing.Dict[str, typing.Any]) +TBody = typing.TypeVar("TBody", bound=typing.Dict[str, typing.Any]) +TEntityDict = typing.TypeVar("TEntityDict") +class SessionFunctionKwargs(typing.Generic[TParams, TBody], typing.TypedDict): + """ + Dictionary of keyword arguments for the session function. + + Attributes: + params (TParams | None): The request parameters. + data (TBody | str): The request body. 
+ timeout (float): The timeout for the request. + verify (bool): Whether to verify + """ + + params: typing.NotRequired[TParams | None] + data: typing.NotRequired[TBody | str] timeout: float verify: bool -class ApiCall(Generic[TEntityDict, TParams, TBody]): API_KEY_HEADER_NAME = 'X-TYPESENSE-API-KEY' +class ApiCall(typing.Generic[TEntityDict, TParams, TBody]): def __init__(self, config: Configuration): self.config = config @@ -103,31 +110,31 @@ def get_exception(http_code: int) -> type[TypesenseClientError]: else: return TypesenseClientError - @overload + @typing.overload def make_request( self, - fn: Callable[..., requests.models.Response], + fn: typing.Callable[..., requests.models.Response], endpoint: str, - as_json: Literal[True], - **kwargs: Unpack[SessionFunctionKwargs[TParams, TBody]], - ) -> TEntityDict: ... + as_json: typing.Literal[True], + **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> TEntityDict: - @overload + @typing.overload def make_request( self, - fn: Callable[..., requests.models.Response], + fn: typing.Callable[..., requests.models.Response], endpoint: str, - as_json: Literal[False], - **kwargs: Unpack[SessionFunctionKwargs[TParams, TBody]], - ) -> str: ... 
+ as_json: typing.Literal[False], + **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> str: # Makes the actual http request, along with retries def make_request( self, - fn: Callable[..., requests.models.Response], + fn: typing.Callable[..., requests.models.Response], endpoint: str, as_json: bool, - **kwargs: Unpack[SessionFunctionKwargs[TParams, TBody]], + **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], ) -> TEntityDict | str: num_tries = 0 last_exception: BaseException @@ -188,20 +195,26 @@ def normalize_params(params: TParams) -> None: elif isinstance(params[key], bool) and not params[key]: params[key] = 'false' - @overload + @typing.overload def get( - self, endpoint: str, as_json: Literal[False], params: TParams | None = None - ) -> str: ... + self, + endpoint: str, + as_json: typing.Literal[False], + params: TParams | None = None, + ) -> str: - @overload + @typing.overload def get( - self, endpoint: str, as_json: Literal[True], params: TParams | None = None - ) -> TEntityDict: ... + self, + endpoint: str, + as_json: typing.Literal[True], + params: TParams | None = None, + ) -> TEntityDict: def get( self, endpoint: str, - as_json: Literal[True] | Literal[False] = True, + as_json: typing.Literal[True] | typing.Literal[False] = True, params: TParams | None = None, ) -> TEntityDict | str: return self.make_request( @@ -213,29 +226,29 @@ def get( verify=self.config.verify, ) - @overload + @typing.overload def post( self, endpoint: str, body: TBody, - as_json: Literal[False], + as_json: typing.Literal[False], params: TParams | None = None, - ) -> str: ... + ) -> str: - @overload + @typing.overload def post( self, endpoint: str, body: TBody, - as_json: Literal[True], + as_json: typing.Literal[True], params: TParams | None = None, - ) -> TEntityDict: ... 
+ ) -> TEntityDict: def post( self, endpoint: str, body: TBody, - as_json: Literal[True, False], + as_json: typing.Literal[True, False], params: TParams | None = None, ) -> str | TEntityDict: if params: diff --git a/src/typesense/configuration.py b/src/typesense/configuration.py index 90cce9b..e7b09df 100644 --- a/src/typesense/configuration.py +++ b/src/typesense/configuration.py @@ -16,15 +16,21 @@ from __future__ import annotations +import sys import time -from typing import Literal, NotRequired, TypedDict, Union + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + from urllib.parse import urlparse from typesense.exceptions import ConfigError from typesense.logger import logger -class NodeConfigDict(TypedDict): +class NodeConfigDict(typing.TypedDict): """ A dictionary that represents the configuration for a node in the Typesense cluster. @@ -32,24 +38,24 @@ class NodeConfigDict(TypedDict): host (str): The host name of the node. port (int): The port number of the node. path (str, optional): The path of the node. - protocol (Literal['http', 'https'] | str): The protocol of the node. + protocol (typing.Literal['http', 'https'] | str): The protocol of the node. """ host: str port: int - path: NotRequired[str] - protocol: Literal['http', 'https'] | str + path: typing.NotRequired[str] + protocol: typing.Literal["http", "https"] | str -class ConfigDict(TypedDict): +class ConfigDict(typing.TypedDict): """ A dictionary that represents the configuration for the Typesense client. Attributes: - nodes (list[Union[str, NodeConfigDict]]): A list of dictionaries or URLs that + nodes (list[typing.Union[str, NodeConfigDict]]): A list of dictionaries or URLs that represent the nodes in the cluster. - nearest_node (Union[str, NodeConfigDict]): A dictionary or URL + nearest_node (typing.Union[str, NodeConfigDict]): A dictionary or URL that represents the nearest node to the client. api_key (str): The API key to use for authentication. 
@@ -65,23 +71,25 @@ class ConfigDict(TypedDict): timeout_seconds (int, deprecated): The connection timeout in seconds. - master_node (Union[str, NodeConfigDict], deprecated): A dictionary or + master_node (typing.Union[str, NodeConfigDict], deprecated): A dictionary or URL that represents the master node. - read_replica_nodes (list[Union[str, NodeConfigDict]], deprecated): A list of + read_replica_nodes (list[typing.Union[str, NodeConfigDict]], deprecated): A list of dictionaries or URLs that represent the read replica nodes. """ - nodes: list[Union[str, NodeConfigDict]] - nearest_node: NotRequired[Union[str, NodeConfigDict]] + nodes: list[typing.Union[str, NodeConfigDict]] + nearest_node: typing.NotRequired[typing.Union[str, NodeConfigDict]] api_key: str - num_retries: NotRequired[int] - interval_seconds: NotRequired[int] - healthcheck_interval_seconds: NotRequired[int] - verify: NotRequired[bool] - timeout_seconds: NotRequired[int] # deprecated - master_node: NotRequired[Union[str, NodeConfigDict]] # deprecated - read_replica_nodes: NotRequired[list[Union[str, NodeConfigDict]]] # deprecated + num_retries: typing.NotRequired[int] + interval_seconds: typing.NotRequired[int] + healthcheck_interval_seconds: typing.NotRequired[int] + verify: typing.NotRequired[bool] + timeout_seconds: typing.NotRequired[int] # deprecated + master_node: typing.NotRequired[typing.Union[str, NodeConfigDict]] # deprecated + read_replica_nodes: typing.NotRequired[ + list[typing.Union[str, NodeConfigDict]] + ] # deprecated class Node: @@ -92,7 +100,7 @@ class Node: host (str): The host name of the node. port (str | int): The port number of the node. path (str): The path of the node. - protocol (Literal['http', 'https'] | str): The protocol of the node. + protocol (typing.Literal['http', 'https'] | str): The protocol of the node. healthy (bool): Whether the node is healthy or not. 
""" @@ -101,7 +109,7 @@ def __init__( host: str, port: str | int, path: str, - protocol: Literal['http', 'https'] | str, + protocol: typing.Literal["http", "https"] | str, ) -> None: """ Initialize a Node object with the specified host, port, path, and protocol. @@ -110,7 +118,7 @@ def __init__( host (str): The host name of the node. port (str | int): The port number of the node. path (str): The path of the node. - protocol (Literal['http', 'https'] | str): The protocol of the node. + protocol (typing.Literal['http', 'https'] | str): The protocol of the node. """ self.host = host self.port = port @@ -208,8 +216,8 @@ def __init__( def _handle_nearest_node( self, - nearest_node: Union[str, NodeConfigDict, None], - ) -> Union[Node, None]: + nearest_node: typing.Union[str, NodeConfigDict, None], + ) -> typing.Union[Node, None]: """ Handle the nearest node configuration. @@ -225,7 +233,7 @@ def _handle_nearest_node( def _initialize_nodes( self, - node: Union[str, NodeConfigDict], + node: typing.Union[str, NodeConfigDict], ) -> Node: """ Handle the initialization of a node. @@ -286,7 +294,7 @@ def validate_required_config_fields(config_dict: ConfigDict) -> None: raise ConfigError('`api_key` is not defined.') @staticmethod - def validate_nodes(nodes: list[Union[str, NodeConfigDict]]) -> None: + def validate_nodes(nodes: list[typing.Union[str, NodeConfigDict]]) -> None: """ Validate the nodes in the configuration dictionary. @@ -309,7 +317,7 @@ def validate_nodes(nodes: list[Union[str, NodeConfigDict]]) -> None: ) @staticmethod - def validate_nearest_node(nearest_node: Union[str, NodeConfigDict]) -> None: + def validate_nearest_node(nearest_node: typing.Union[str, NodeConfigDict]) -> None: """ Validate the nearest node in the configuration dictionary. 
diff --git a/tests/api_call_test.py b/tests/api_call_test.py index 8ce0229..04c3cd1 100644 --- a/tests/api_call_test.py +++ b/tests/api_call_test.py @@ -1,6 +1,9 @@ """Unit Tests for the ApiCall class.""" +from __future__ import annotations + import time +from typing import Dict import pytest import requests @@ -53,13 +56,13 @@ def config_fixture() -> Configuration: @pytest.fixture(scope="function", name="api_call") def api_call_fixture( config: Configuration, -) -> ApiCall[dict[str, str], dict[str, str], dict[str, str]]: +) -> ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]]: """Return an ApiCall object with test values.""" - return ApiCall[dict[str, str], dict[str, str], dict[str, str]](config) + return ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]](config) def test_initialization( - api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], + api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], config: Configuration, ) -> None: """Test the initialization of the ApiCall object.""" @@ -69,7 +72,7 @@ def test_initialization( def test_node_due_for_health_check( - api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], + api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], ) -> None: """Test that it correctly identifies if a node is due for health check.""" node = Node(host="localhost", port=8108, protocol="http", path=" ") @@ -78,7 +81,7 @@ def test_node_due_for_health_check( def test_get_node_nearest_healthy( - api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], + api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], ) -> None: """Test that it correctly selects the nearest node if it is healthy.""" node = api_call.get_node() @@ -86,7 +89,7 @@ def test_get_node_nearest_healthy( def test_get_node_nearest_not_healthy( - api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], + api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], ) -> None: """Test that 
it selects the next available node if the nearest node is not healthy.""" api_call.config.nearest_node.healthy = False @@ -95,7 +98,7 @@ def test_get_node_nearest_not_healthy( def test_get_node_round_robin_selection( - api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], + api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], mocker: MockerFixture, ) -> None: """Test that it selects the next available node in a round-robin fashion.""" @@ -128,7 +131,7 @@ def test_get_exception() -> None: def test_normalize_params_with_booleans() -> None: """Test that it correctly normalizes boolean values to strings.""" - parameter_dict: dict[str, str | bool] = {"key1": True, "key2": False} + parameter_dict: Dict[str, str | bool] = {"key1": True, "key2": False} ApiCall.normalize_params(parameter_dict) assert parameter_dict == {"key1": "true", "key2": "false"} @@ -148,7 +151,7 @@ def test_normalize_params_with_mixed_types() -> None: def test_normalize_params_with_empty_dict() -> None: """Test that it correctly normalizes an empty dictionary.""" - parameter_dict: dict[str, str] = {} + parameter_dict: Dict[str, str] = {} ApiCall.normalize_params(parameter_dict) assert not parameter_dict @@ -191,7 +194,7 @@ def test_make_request_as_text(api_call: ApiCall) -> None: def test_get_as_json( - api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], + api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], ) -> None: """Test the GET method with JSON response.""" with requests_mock.mock() as request_mocker: @@ -204,7 +207,7 @@ def test_get_as_json( def test_get_as_text( - api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], + api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], ) -> None: """Test the GET method with text response.""" with requests_mock.mock() as request_mocker: @@ -217,7 +220,7 @@ def test_get_as_text( def test_post_as_json( - api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], + 
api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], ) -> None: """Test the POST method with JSON response.""" with requests_mock.mock() as request_mocker: @@ -232,7 +235,7 @@ def test_post_as_json( def test_post_with_params( - api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], + api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], ) -> None: """Test that the parameters are correctly passed to the request.""" with requests_mock.Mocker() as request_mocker: @@ -264,7 +267,7 @@ def test_post_with_params( def test_post_as_text( - api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], + api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], ) -> None: """Test the POST method with text response.""" with requests_mock.mock() as request_mocker: @@ -278,7 +281,7 @@ def test_post_as_text( def test_put_as_json( - api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], + api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], ) -> None: """Test the PUT method with JSON response.""" with requests_mock.mock() as request_mocker: @@ -291,7 +294,7 @@ def test_put_as_json( def test_patch_as_json( - api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], + api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], ) -> None: """Test the PATCH method with JSON response.""" with requests_mock.mock() as request_mocker: @@ -304,7 +307,7 @@ def test_patch_as_json( def test_delete_as_json( - api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], + api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], ) -> None: """Test the DELETE method with JSON response.""" with requests_mock.mock() as request_mocker: @@ -319,7 +322,7 @@ def test_delete_as_json( def test_raise_custom_exception_with_header( - api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], + api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], ) -> None: """Test that it 
raises a custom exception with the error message.""" with requests_mock.mock() as request_mocker: @@ -336,7 +339,7 @@ def test_raise_custom_exception_with_header( def test_raise_custom_exception_without_header( - api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], + api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], ) -> None: """Test that it raises a custom exception with the error message.""" with requests_mock.mock() as request_mocker: @@ -352,7 +355,7 @@ def test_raise_custom_exception_without_header( def test_selects_next_available_node_on_timeout( - api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], + api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], ) -> None: """Test that it selects the next available node if the request times out.""" with requests_mock.mock() as request_mocker: @@ -381,7 +384,7 @@ def test_selects_next_available_node_on_timeout( def test_raises_if_no_nodes_are_healthy_with_the_last_exception( - api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], + api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], ) -> None: """Test that it raises the last exception if no nodes are healthy.""" with requests_mock.mock() as request_mocker: @@ -399,7 +402,7 @@ def test_raises_if_no_nodes_are_healthy_with_the_last_exception( def test_uses_nearest_node_if_present_and_healthy( mocker: MockerFixture, - api_call: ApiCall[dict[str, str], dict[str, str], dict[str, str]], + api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], ) -> None: """Test that it uses the nearest node if it is present and healthy.""" with requests_mock.Mocker() as request_mocker: From ca933f6d1968e482bd0b40ac4620ebb46ce0f6a4 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Thu, 1 Aug 2024 13:22:45 +0300 Subject: [PATCH 057/288] style(configuration): prefer double quotes over single quotes --- src/typesense/configuration.py | 86 +++++++++++++++++----------------- 1 file changed, 43 
insertions(+), 43 deletions(-) diff --git a/src/typesense/configuration.py b/src/typesense/configuration.py index e7b09df..f37ce09 100644 --- a/src/typesense/configuration.py +++ b/src/typesense/configuration.py @@ -132,7 +132,7 @@ def __init__( self.last_access_ts: int = int(time.time()) @classmethod - def from_url(cls, url: str) -> 'Node': + def from_url(cls, url: str) -> "Node": """ Initialize a Node object from a URL string. @@ -147,11 +147,11 @@ def from_url(cls, url: str) -> 'Node': """ parsed = urlparse(url) if not parsed.hostname: - raise ConfigError('Node URL does not contain the host name.') + raise ConfigError("Node URL does not contain the host name.") if not parsed.port: - raise ConfigError('Node URL does not contain the port.') + raise ConfigError("Node URL does not contain the port.") if not parsed.scheme: - raise ConfigError('Node URL does not contain the protocol.') + raise ConfigError("Node URL does not contain the protocol.") return cls(parsed.hostname, parsed.port, parsed.path, parsed.scheme) @@ -162,7 +162,7 @@ def url(self) -> str: Returns: str: The URL of the node """ - return f'{self.protocol}://{self.host}:{self.port}{self.path}' + return f"{self.protocol}://{self.host}:{self.port}{self.path}" class Configuration: @@ -195,21 +195,21 @@ def __init__( self.validations.validate_config_dict(config_dict) self.nodes: list[Node] = [ - self._initialize_nodes(node) for node in config_dict['nodes'] + self._initialize_nodes(node) for node in config_dict["nodes"] ] - nearest_node = config_dict.get('nearest_node', None) + nearest_node = config_dict.get("nearest_node", None) self.nearest_node = self._handle_nearest_node(nearest_node) - self.api_key = config_dict.get('api_key', ' ') + self.api_key = config_dict.get("api_key", " ") self.connection_timeout_seconds = config_dict.get( - 'connection_timeout_seconds', + "connection_timeout_seconds", 3.0, ) - self.num_retries = config_dict.get('num_retries', 3) - self.retry_interval_seconds = 
config_dict.get('retry_interval_seconds', 1.0) + self.num_retries = config_dict.get("num_retries", 3) + self.retry_interval_seconds = config_dict.get("retry_interval_seconds", 1.0) self.healthcheck_interval_seconds = config_dict.get( - 'healthcheck_interval_seconds', + "healthcheck_interval_seconds", 60, ) self.verify = config_dict.get("verify", True) @@ -248,10 +248,10 @@ def _initialize_nodes( return Node.from_url(node) return Node( - node['host'], - node['port'], - node.get('path', ''), - node['protocol'], + node["host"], + node["port"], + node.get("path", ""), + node["protocol"], ) @@ -270,9 +270,9 @@ def validate_config_dict(config_dict: ConfigDict) -> None: ConfigError: If the configuration dictionary is missing required fields. """ ConfigurationValidations.validate_required_config_fields(config_dict) - ConfigurationValidations.validate_nodes(config_dict['nodes']) + ConfigurationValidations.validate_nodes(config_dict["nodes"]) - nearest_node = config_dict.get('nearest_node', None) + nearest_node = config_dict.get("nearest_node", None) if nearest_node: ConfigurationValidations.validate_nearest_node(nearest_node) @@ -287,11 +287,11 @@ def validate_required_config_fields(config_dict: ConfigDict) -> None: Raises: ConfigError: If the configuration dictionary is missing required fields. 
""" - if not config_dict.get('nodes'): - raise ConfigError('`nodes` is not defined.') + if not config_dict.get("nodes"): + raise ConfigError("`nodes` is not defined.") - if not config_dict.get('api_key'): - raise ConfigError('`api_key` is not defined.') + if not config_dict.get("api_key"): + raise ConfigError("`api_key` is not defined.") @staticmethod def validate_nodes(nodes: list[typing.Union[str, NodeConfigDict]]) -> None: @@ -307,11 +307,11 @@ def validate_nodes(nodes: list[typing.Union[str, NodeConfigDict]]) -> None: for node in nodes: if not ConfigurationValidations.validate_node_fields(node): raise ConfigError( - ' '.join( + " ".join( [ - '`node` entry must be a URL string or a', - 'dictionary with the following required keys:', - 'host, port, protocol', + "`node` entry must be a URL string or a", + "dictionary with the following required keys:", + "host, port, protocol", ], ), ) @@ -329,11 +329,11 @@ def validate_nearest_node(nearest_node: typing.Union[str, NodeConfigDict]) -> No """ if not ConfigurationValidations.validate_node_fields(nearest_node): raise ConfigError( - ' '.join( + " ".join( [ - '`nearest_node` entry must be a URL string or a dictionary', - 'with the following required keys:', - 'host, port, protocol', + "`nearest_node` entry must be a URL string or a dictionary", + "with the following required keys:", + "host, port, protocol", ], ), ) @@ -351,7 +351,7 @@ def validate_node_fields(node: str | NodeConfigDict) -> bool: """ if isinstance(node, str): return True - expected_fields = {'host', 'port', 'protocol'} + expected_fields = {"host", "port", "protocol"} return expected_fields.issubset(node) @staticmethod @@ -363,32 +363,32 @@ def show_deprecation_warnings(config_dict: ConfigDict) -> None: config_dict (ConfigDict): The configuration dictionary to check for deprecated fields. 
""" - if config_dict.get('timeout_seconds'): + if config_dict.get("timeout_seconds"): logger.warn( - ' '.join( + " ".join( [ - 'Deprecation warning: timeout_seconds is now renamed', - 'to connection_timeout_seconds', + "Deprecation warning: timeout_seconds is now renamed", + "to connection_timeout_seconds", ], ), ) - if config_dict.get('master_node'): + if config_dict.get("master_node"): logger.warn( - ' '.join( + " ".join( [ - 'Deprecation warning: master_node is now consolidated', - 'to nodes,starting with Typesense Server v0.12', + "Deprecation warning: master_node is now consolidated", + "to nodes,starting with Typesense Server v0.12", ], ), ) - if config_dict.get('read_replica_nodes'): + if config_dict.get("read_replica_nodes"): logger.warn( - ' '.join( + " ".join( [ - 'Deprecation warning: read_replica_nodes is now', - 'consolidated to nodes, starting with Typesense Server v0.12', + "Deprecation warning: read_replica_nodes is now", + "consolidated to nodes, starting with Typesense Server v0.12", ], ), ) From d006a41b9883fba0ea96045fe8fa36a12989d534 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Thu, 1 Aug 2024 13:34:14 +0300 Subject: [PATCH 058/288] docs(api_call): add docstrings for api call module - Add comprehensive class-level docstring for `ApiCall` - Implement method-specific docstrings for all public methods - Follow Google docstring format for consistency - Include descriptions, parameters, return types, and exceptions - Improve code documentation for better readability - Enhance maintainability with detailed method explanations --- src/typesense/api_call.py | 477 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 477 insertions(+) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index b9e7ea0..f17faa1 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -1,3 +1,36 @@ +""" +This module is responsible for making HTTP requests to the Typesense API. 
+ +Classes: + - ApiCall: A class that makes HTTP requests to the Typesense API. + +Functions: + - get_exception: Get the exception class for a given HTTP status code. + - normalize_params: Normalize boolean values in the request parameters to strings. + - make_request: Make the actual HTTP request, along with retries. + - node_due_for_health_check: Check if a node is due for a health check. + - set_node_healthcheck: Set the health status of a node and update the + last access timestamp. + - get_node: Get a healthy host from the pool in a round-robin fashion. + - initialize_nodes: Initialize the nodes in the pool. + - get: Make a GET request to the endpoint with the given parameters. + - post: Make a POST request to the endpoint with the given parameters. + - put: Make a PUT request to the endpoint with the given parameters. + - patch: Make a PATCH request to the endpoint with the given parameters. + - delete: Make a DELETE request to the endpoint with the given parameters. + +Exceptions: + - HTTPStatus0Error: An exception raised when the status code is 0. + - RequestMalformed: An exception raised when the status code is 400. + - RequestUnauthorized: An exception raised when the status code is 401. + - RequestForbidden: An exception raised when the status code is 403. + - ObjectNotFound: An exception raised when the status code is 404. + - ObjectAlreadyExists: An exception raised when the status code is 409. + - ObjectUnprocessable: An exception raised when the status code is 422. + - ServerError: An exception raised when the status code is 500. + - ServiceUnavailable: An exception raised when the status code is 503. + - TypesenseClientError: An exception raised when the status code is not one of the above. 
+""" from __future__ import annotations @@ -42,8 +75,34 @@ class SessionFunctionKwargs(typing.Generic[TParams, TBody], typing.TypedDict): API_KEY_HEADER_NAME = 'X-TYPESENSE-API-KEY' class ApiCall(typing.Generic[TEntityDict, TParams, TBody]): + """Handles API calls to Typesense with retry and node selection logic. + + This class manages API requests to Typesense, including node selection, + health checks, retries, and error handling. It supports various HTTP methods + and handles authentication and request formatting. + + Attributes: + API_KEY_HEADER_NAME (str): The header name for the API key. + config (Configuration): The configuration for the API client. + nodes (List[Node]): A copy of the nodes from the configuration. + node_index (int): The current index for round-robin node selection. + + Methods: + get_node: Selects a healthy node for the next API call. + make_request: Executes an API request with retries and error handling. + get: Performs a GET request. + post: Performs a POST request. + put: Performs a PUT request. + patch: Performs a PATCH request. + delete: Performs a DELETE request. + """ def __init__(self, config: Configuration): + """Initializes the ApiCall instance with the given configuration. + + Args: + config (Configuration): The configuration for the API client. + """ self.config = config self.nodes = copy.deepcopy(self.config.nodes) self.node_index = 0 @@ -66,6 +125,14 @@ def node_due_for_health_check(self, node: Node) -> bool: # Returns a healthy host from the pool in a round-robin fashion. # Might return an unhealthy host periodically to check for recovery. def get_node(self) -> Node: + """ + Return a healthy host from the pool in a round-robin fashion. + + Might return an unhealthy host periodically to check for recovery. + + Returns: + Node: The healthy host from the pool in a round-robin fashion. 
+ """ if self.config.nearest_node: if self.config.nearest_node.healthy or self.node_due_for_health_check(self.config.nearest_node): logger.debug('Using nearest node.') @@ -89,6 +156,15 @@ def get_node(self) -> Node: @staticmethod def get_exception(http_code: int) -> type[TypesenseClientError]: + """ + Return the exception class for a given HTTP status code. + + Args: + http_code (int): The HTTP status code. + + Returns: + Type[TypesenseClientError]: The exception class for the given HTTP status code. + """ if http_code == 0: return HTTPStatus0Error elif http_code == 400: @@ -118,6 +194,39 @@ def make_request( as_json: typing.Literal[True], **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], ) -> TEntityDict: + """ + Use a session function to make a request to the endpoint with the given kwargs. + + Args: + fn (Callable[..., requests.models.Response]): The session function to use. + endpoint (str): The endpoint to make the request to. + as_json (bool): Whether to return the response as a JSON object. + kwargs (SessionFunctionKwargs): The keyword arguments for the session function. + + Returns: + TEntityDict: The response from the request in json format. + + :raises: + HTTPStatus0Error: If the status code is 0. + + RequestMalformed: If the status code is 400. + + RequestUnauthorized: If the status code is 401. + + RequestForbidden: If the status code is 403. + + ObjectNotFound: If the status code is 404. + + ObjectAlreadyExists: If the status code is 409. + + ObjectUnprocessable: If the status code is 422. + + ServerError: If the status code is 500. + + ServiceUnavailable: If the status code is 503. + + TypesenseClientError: If the status code is not one of the above. + """ @typing.overload def make_request( @@ -127,6 +236,39 @@ def make_request( as_json: typing.Literal[False], **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], ) -> str: + """ + Use a session function to make a request to the endpoint with the given kwargs. 
+ + Args: + fn (Callable[..., requests.models.Response]): The session function to use. + endpoint (str): The endpoint to make the request to. + as_json (bool): Whether to return the response as a JSON object. + kwargs (SessionFunctionKwargs): The keyword arguments for the session function. + + Returns: + str: The response from the request in text format. + + :raises: + HTTPStatus0Error: If the status code is 0. + + RequestMalformed: If the status code is 400. + + RequestUnauthorized: If the status code is 401. + + RequestForbidden: If the status code is 403. + + ObjectNotFound: If the status code is 404. + + ObjectAlreadyExists: If the status code is 409. + + ObjectUnprocessable: If the status code is 422. + + ServerError: If the status code is 500. + + ServiceUnavailable: If the status code is 503. + + TypesenseClientError: If the status code is not one of the above. + """ # Makes the actual http request, along with retries def make_request( @@ -136,6 +278,39 @@ def make_request( as_json: bool, **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], ) -> TEntityDict | str: + """ + Use a session function to make a request to the endpoint with the given kwargs. + + Args: + fn (Callable[..., requests.models.Response]): The session function to use. + endpoint (str): The endpoint to make the request to. + as_json (bool): Whether to return the response as a JSON object. + kwargs (SessionFunctionKwargs): The keyword arguments for the session function. + + Returns: + Union[TEntityDict, str]: The response from the request. + + :raises: + HTTPStatus0Error: If the status code is 0. + + RequestMalformed: If the status code is 400. + + RequestUnauthorized: If the status code is 401. + + RequestForbidden: If the status code is 403. + + ObjectNotFound: If the status code is 404. + + ObjectAlreadyExists: If the status code is 409. + + ObjectUnprocessable: If the status code is 422. + + ServerError: If the status code is 500. 
+ + ServiceUnavailable: If the status code is 503. + + TypesenseClientError: If the status code is not one of the above. + """ num_tries = 0 last_exception: BaseException @@ -184,11 +359,24 @@ def make_request( raise last_exception def set_node_healthcheck(self, node: Node, is_healthy: bool) -> None: + """ + Set the health status of the node and updates the last access timestamp. + + Args: + node (Node): The node to set the health status of. + is_healthy (bool): Whether the node is healthy + """ node.healthy = is_healthy node.last_access_ts = int(time.time()) @staticmethod def normalize_params(params: TParams) -> None: + """ + Normalize boolean values in the request parameters to strings. + + Args: + params (TParams): The request parameters. + """ for key in params.keys(): if isinstance(params[key], bool) and params[key]: params[key] = 'true' @@ -202,6 +390,38 @@ def get( as_json: typing.Literal[False], params: TParams | None = None, ) -> str: + """ + Make a GET request to the endpoint with the given parameters. + + Args: + endpoint (str): The endpoint to make the request to. + as_json = True: Whether to return the response as a JSON object. + params (TParams | None): The request parameters. + + Returns: + TEntityDict: The response from the request in json format + + :raises: + HTTPStatus0Error: If the status code is 0. + + RequestMalformed: If the status code is 400. + + RequestUnauthorized: If the status code is 401. + + RequestForbidden: If the status code is 403. + + ObjectNotFound: If the status code is 404. + + ObjectAlreadyExists: If the status code is 409. + + ObjectUnprocessable: If the status code is 422. + + ServerError: If the status code is 500. + + ServiceUnavailable: If the status code is 503. + + TypesenseClientError: If the status code is not one of the above. 
+ """ @typing.overload def get( @@ -210,6 +430,38 @@ def get( as_json: typing.Literal[True], params: TParams | None = None, ) -> TEntityDict: + """ + Make a GET request to the endpoint with the given parameters. + + Args: + endpoint (str): The endpoint to make the request to. + as_json = False: Whether to return the response as a JSON object. + params (TParams | None): The request parameters. + + Returns: + str: The response from the request in text format + + :raises: + HTTPStatus0Error: If the status code is 0. + + RequestMalformed: If the status code is 400. + + RequestUnauthorized: If the status code is 401. + + RequestForbidden: If the status code is 403. + + ObjectNotFound: If the status code is 404. + + ObjectAlreadyExists: If the status code is 409. + + ObjectUnprocessable: If the status code is 422. + + ServerError: If the status code is 500. + + ServiceUnavailable: If the status code is 503. + + TypesenseClientError: If the status code is not one of the above. + """ def get( self, @@ -217,6 +469,38 @@ def get( as_json: typing.Literal[True] | typing.Literal[False] = True, params: TParams | None = None, ) -> TEntityDict | str: + """ + Make a GET request to the endpoint with the given parameters. + + Args: + endpoint (str): The endpoint to make the request to. + as_json (bool): Whether to return the response as a JSON object. + params (TParams | None): The request parameters. + + Returns: + Union[TEntityDict, str]: The response from the request + + :raises: + HTTPStatus0Error: If the status code is 0. + + RequestMalformed: If the status code is 400. + + RequestUnauthorized: If the status code is 401. + + RequestForbidden: If the status code is 403. + + ObjectNotFound: If the status code is 404. + + ObjectAlreadyExists: If the status code is 409. + + ObjectUnprocessable: If the status code is 422. + + ServerError: If the status code is 500. + + ServiceUnavailable: If the status code is 503. + + TypesenseClientError: If the status code is not one of the above. 
+ """ return self.make_request( session.get, endpoint, @@ -234,6 +518,39 @@ def post( as_json: typing.Literal[False], params: TParams | None = None, ) -> str: + """ + Make a POST request to the endpoint with the given parameters. + + Args: + endpoint (str): The endpoint to make the request to. + body (TBody): The request body. + as_json = False: Whether to return the response as a JSON object. + params (TParams | None): The request parameters. + + Returns: + str: The response from the request in text format + + :raises: + HTTPStatus0Error: If the status code is 0. + + RequestMalformed: If the status code is 400. + + RequestUnauthorized: If the status code is 401. + + RequestForbidden: If the status code is 403. + + ObjectNotFound: If the status code is 404. + + ObjectAlreadyExists: If the status code is 409. + + ObjectUnprocessable: If the status code is 422. + + ServerError: If the status code is 500. + + ServiceUnavailable: If the status code is 503. + + TypesenseClientError: If the status code is not one of the above. + """ @typing.overload def post( @@ -243,6 +560,39 @@ def post( as_json: typing.Literal[True], params: TParams | None = None, ) -> TEntityDict: + """ + Make a POST request to the endpoint with the given parameters. + + Args: + endpoint (str): The endpoint to make the request to. + body (TBody): The request body. + as_json = True: Whether to return the response as a JSON object. + params (TParams | None): The request parameters. + + Returns: + TEntityDict: The response from the request in json format + + :raises: + HTTPStatus0Error: If the status code is 0. + + RequestMalformed: If the status code is 400. + + RequestUnauthorized: If the status code is 401. + + RequestForbidden: If the status code is 403. + + ObjectNotFound: If the status code is 404. + + ObjectAlreadyExists: If the status code is 409. + + ObjectUnprocessable: If the status code is 422. + + ServerError: If the status code is 500. + + ServiceUnavailable: If the status code is 503. 
+ + TypesenseClientError: If the status code is not one of the above. + """ def post( self, @@ -251,6 +601,39 @@ def post( as_json: typing.Literal[True, False], params: TParams | None = None, ) -> str | TEntityDict: + """ + Make a POST request to the endpoint with the given parameters. + + Args: + endpoint (str): The endpoint to make the request to. + body (TBody): The request body. + as_json = bool: Whether to return the response as a JSON object. + params (TParams | None): The request parameters. + + Returns: + TEntityDict | str: The response from the request + + :raises: + HTTPStatus0Error: If the status code is 0. + + RequestMalformed: If the status code is 400. + + RequestUnauthorized: If the status code is 401. + + RequestForbidden: If the status code is 403. + + ObjectNotFound: If the status code is 404. + + ObjectAlreadyExists: If the status code is 409. + + ObjectUnprocessable: If the status code is 422. + + ServerError: If the status code is 500. + + ServiceUnavailable: If the status code is 503. + + TypesenseClientError: If the status code is not one of the above. + """ if params: ApiCall.normalize_params(params) return self.make_request( @@ -269,6 +652,38 @@ def put( body: TBody, params: TParams | None = None, ) -> TEntityDict: + """ + Make a PUT request to the endpoint with the given parameters. + + Args: + endpoint (str): The endpoint to make the request to. + body (TBody): The request body. + params (TParams | None): The request parameters. + + Returns: + TEntityDict: The response from the request in json format + + :raises: + HTTPStatus0Error: If the status code is 0. + + RequestMalformed: If the status code is 400. + + RequestUnauthorized: If the status code is 401. + + RequestForbidden: If the status code is 403. + + ObjectNotFound: If the status code is 404. + + ObjectAlreadyExists: If the status code is 409. + + ObjectUnprocessable: If the status code is 422. + + ServerError: If the status code is 500. 
+ + ServiceUnavailable: If the status code is 503. + + TypesenseClientError: If the status code is not one of the above. + """ return self.make_request( session.put, endpoint, @@ -285,6 +700,38 @@ def patch( body: TBody, params: TParams | None = None, ) -> TEntityDict: + """ + Make a PATCH request to the endpoint with the given parameters. + + Args: + endpoint (str): The endpoint to make the request to. + body (TBody): The request body. + params (TParams | None): The request parameters. + + Returns: + TEntityDict: The response from the request in json format + + :raises: + HTTPStatus0Error: If the status code is 0. + + RequestMalformed: If the status code is 400. + + RequestUnauthorized: If the status code is 401. + + RequestForbidden: If the status code is 403. + + ObjectNotFound: If the status code is 404. + + ObjectAlreadyExists: If the status code is 409. + + ObjectUnprocessable: If the status code is 422. + + ServerError: If the status code is 500. + + ServiceUnavailable: If the status code is 503. + + TypesenseClientError: If the status code is not one of the above. + """ return self.make_request( session.patch, endpoint, @@ -296,7 +743,37 @@ def patch( ) def delete(self, endpoint: str, params: TParams | None = None) -> TEntityDict: + """ + Make a DELETE request to the endpoint with the given parameters. + + Args: + endpoint (str): The endpoint to make the request to. + params (TParams | None): The request parameters. + + Returns: + TEntityDict: The response from the request in json format + + :raises: + HTTPStatus0Error: If the status code is 0. + + RequestMalformed: If the status code is 400. + + RequestUnauthorized: If the status code is 401. + + RequestForbidden: If the status code is 403. + + ObjectNotFound: If the status code is 404. + + ObjectAlreadyExists: If the status code is 409. + + ObjectUnprocessable: If the status code is 422. + + ServerError: If the status code is 500. + + ServiceUnavailable: If the status code is 503. 
+ TypesenseClientError: If the status code is not one of the above. + """ return self.make_request( session.delete, endpoint, From c492a7ab1501632aea929e172cad3159e7b25bd4 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Thu, 1 Aug 2024 13:39:15 +0300 Subject: [PATCH 059/288] refactor(api_call): use absolute module import for exceptions - Update exception imports to use the exceptions module - Modify get_exception method to return exceptions from module - Avoid using relative imports and importing many members of the exceptions module --- src/typesense/api_call.py | 40 +++++++++++++++++++++------------------ 1 file changed, 22 insertions(+), 18 deletions(-) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index f17faa1..c24e13e 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -40,10 +40,6 @@ import time import requests -from .exceptions import (HTTPStatus0Error, ObjectAlreadyExists, - ObjectNotFound, ObjectUnprocessable, - RequestMalformed, RequestUnauthorized, RequestForbidden, - ServerError, ServiceUnavailable, TypesenseClientError) from typesense.configuration import Configuration, Node from .logger import logger session = requests.session() @@ -53,6 +49,7 @@ else: import typing_extensions as typing +import typesense.exceptions as exceptions TParams = typing.TypeVar("TParams", bound=typing.Dict[str, typing.Any]) TBody = typing.TypeVar("TBody", bound=typing.Dict[str, typing.Any]) TEntityDict = typing.TypeVar("TEntityDict") @@ -155,7 +152,7 @@ def get_node(self) -> Node: return self.nodes[self.node_index] @staticmethod - def get_exception(http_code: int) -> type[TypesenseClientError]: + def get_exception(http_code: int) -> type[exceptions.TypesenseClientError]: """ Return the exception class for a given HTTP status code. @@ -166,25 +163,25 @@ def get_exception(http_code: int) -> type[TypesenseClientError]: Type[TypesenseClientError]: The exception class for the given HTTP status code. 
""" if http_code == 0: - return HTTPStatus0Error + return exceptions.HTTPStatus0Error elif http_code == 400: - return RequestMalformed + return exceptions.RequestMalformed elif http_code == 401: - return RequestUnauthorized + return exceptions.RequestUnauthorized elif http_code == 403: - return RequestForbidden + return exceptions.RequestForbidden elif http_code == 404: - return ObjectNotFound + return exceptions.ObjectNotFound elif http_code == 409: - return ObjectAlreadyExists + return exceptions.ObjectAlreadyExists elif http_code == 422: - return ObjectUnprocessable + return exceptions.ObjectUnprocessable elif http_code == 500: - return ServerError + return exceptions.ServerError elif http_code == 503: - return ServiceUnavailable + return exceptions.ServiceUnavailable else: - return TypesenseClientError + return exceptions.TypesenseClientError @typing.overload def make_request( @@ -345,9 +342,16 @@ def make_request( raise ApiCall.get_exception(r.status_code)(r.status_code, error_message) return r.json() if as_json else r.text - except (requests.exceptions.Timeout, requests.exceptions.ConnectionError, requests.exceptions.HTTPError, - requests.exceptions.RequestException, requests.exceptions.SSLError, - HTTPStatus0Error, ServerError, ServiceUnavailable) as e: + except ( + requests.exceptions.Timeout, + requests.exceptions.ConnectionError, + requests.exceptions.HTTPError, + requests.exceptions.RequestException, + requests.exceptions.SSLError, + exceptions.HTTPStatus0Error, + exceptions.ServerError, + exceptions.ServiceUnavailable, + ) as e: # Catch the exception and retry self.set_node_healthcheck(node, False) logger.debug('Request to {}:{} failed because of {}'.format(node.host, node.port, e)) From 7989c98871d11e39cd4c6ec89aea002903108165 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Thu, 1 Aug 2024 13:41:13 +0300 Subject: [PATCH 060/288] fix(api_call): init session instance using the constructor - According to `requests` documentation: > .. 
deprecated:: 1.0.0 This method has been deprecated since version 1.0.0 and is only kept for backwards compatibility. New code should use :class:`~requests.sessions.Session` to create a session. This may be removed at a future date. --- src/typesense/api_call.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index c24e13e..7725d3e 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -42,7 +42,6 @@ import requests from typesense.configuration import Configuration, Node from .logger import logger -session = requests.session() if sys.version_info >= (3, 11): import typing @@ -50,6 +49,7 @@ import typing_extensions as typing import typesense.exceptions as exceptions +session = requests.sessions.Session() TParams = typing.TypeVar("TParams", bound=typing.Dict[str, typing.Any]) TBody = typing.TypeVar("TBody", bound=typing.Dict[str, typing.Any]) TEntityDict = typing.TypeVar("TEntityDict") From 28560ceb53dec858b690fd4124db0a664ff5eb8d Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Thu, 1 Aug 2024 15:06:18 +0300 Subject: [PATCH 061/288] refactor(api_call): use f strings for logging - Join strings with spaces when they're over the 88 column mark - Replace old string formatting with f-strings for readability --- src/typesense/api_call.py | 37 +++++++++++++++++++++++++++++-------- 1 file changed, 29 insertions(+), 8 deletions(-) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index 7725d3e..fb10af6 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -116,7 +116,9 @@ def node_due_for_health_check(self, node: Node) -> bool: current_epoch_ts = int(time.time()) due_for_check: bool = (current_epoch_ts - node.last_access_ts) > self.config.healthcheck_interval_seconds if due_for_check: - logger.debug('Node {}:{} is due for health check.'.format(node.host, node.port)) + logger.debug( + f"Node {node.host}:{node.port} is due for health check.", + 
) return due_for_check # Returns a healthy host from the pool in a round-robin fashion. @@ -311,13 +313,15 @@ def make_request( num_tries = 0 last_exception: BaseException - logger.debug('Making {} {}'.format(fn.__name__, endpoint)) + logger.debug(f"Making {fn.__name__} {endpoint}") while num_tries < (self.config.num_retries + 1): num_tries += 1 node = self.get_node() - logger.debug('Try {} to node {}:{} -- healthy? {}'.format(num_tries, node.host, node.port, node.healthy)) + logger.debug( + f"Try {num_tries} to node {node.host}:{node.port} -- healthy? {node.healthy}", + ) try: url = node.url() + endpoint @@ -328,9 +332,16 @@ def make_request( # Treat any status code > 0 and < 500 to be an indication that node is healthy # We exclude 0 since some clients return 0 when request fails - if 0 < r.status_code < 500: - logger.debug('{}:{} is healthy. Status code: {}'.format(node.host, node.port, r.status_code)) self.set_node_healthcheck(node, True) + if 0 < response.status_code < 500: + logger.debug( + "".join( + [ + f"{node.host}:{node.port} is healthy.", + f"Status code: {response.status_code}", + ], + ), + ) # We should raise a custom exception if status code is not 20X if not 200 <= r.status_code < 300: @@ -354,12 +365,22 @@ def make_request( ) as e: # Catch the exception and retry self.set_node_healthcheck(node, False) - logger.debug('Request to {}:{} failed because of {}'.format(node.host, node.port, e)) - logger.debug('Sleeping for {} and retrying...'.format(self.config.retry_interval_seconds)) + self.set_node_healthcheck(node, is_healthy=False) + logger.debug( + " ".join( + [ + f"Request to {node.host}:{node.port} failed", + "because of {connection_error}", + ], + ), + ) + logger.debug( + f"Sleeping for {self.config.retry_interval_seconds} and retrying...", + ) last_exception = e time.sleep(self.config.retry_interval_seconds) - logger.debug('No retries left. Raising last exception: {}'.format(last_exception)) + logger.debug(f"No retries left. 
Raising last exception: {last_exception}") raise last_exception def set_node_healthcheck(self, node: Node, is_healthy: bool) -> None: From c71d6c702d980e45926f722ea2d918e3831924d5 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Thu, 1 Aug 2024 15:12:45 +0300 Subject: [PATCH 062/288] refactor(api_call): unify error message condition for failing requests - Simplify condition to check for non-2XX status codes - Retrieve error message based on content type in response headers - Ensure consistent error message extraction for JSON responses --- src/typesense/api_call.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index fb10af6..c8a62f2 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -344,11 +344,13 @@ def make_request( ) # We should raise a custom exception if status code is not 20X - if not 200 <= r.status_code < 300: - if r.headers.get('Content-Type', '').startswith('application/json'): - error_message = r.json().get('message', 'API error.') - else: - error_message = 'API error.' + if r.status_code < 200 or r.status_code >= 300: + content_type = r.headers.get("Content-Type", "") + error_message = ( + r.json().get("message", "API error.") + if content_type.startswith("application/json") + else "API error." 
+ ) # Raised exception will be caught and retried raise ApiCall.get_exception(r.status_code)(r.status_code, error_message) From f32b4e8a5503513c4c448997cf6d010f38db1f31 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Thu, 1 Aug 2024 15:14:06 +0300 Subject: [PATCH 063/288] refactor(api_call): enhance type hinting for json responses on requests - Add type hinting for JSON response to avoid returning any type - Return properly typed JSON object when `as_json` is True - Only access the json property if `as_json` is True - Maintain existing functionality for non-JSON responses --- src/typesense/api_call.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index c8a62f2..575a936 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -354,7 +354,11 @@ def make_request( # Raised exception will be caught and retried raise ApiCall.get_exception(r.status_code)(r.status_code, error_message) - return r.json() if as_json else r.text + if as_json: + # Have to use type hinting to avoid returning any + resposne_json: TEntityDict = response.json() + return resposne_json # noqa: WPS331 + return response.text except ( requests.exceptions.Timeout, requests.exceptions.ConnectionError, From c5dcb35e0d4917cad7ad609404844cc99430e98b Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Thu, 1 Aug 2024 15:16:58 +0300 Subject: [PATCH 064/288] refactor(api_call): use tuple for type checking the body on requests - Use `isinstance` with a tuple to check data type in a single call - Ensure `kwargs["data"]` is JSON serialized if not a string or bytes This commit simplifies the data type check for `kwargs["data"]` in `api_call.py` by using a tuple in the `isinstance` function, enhancing code readability and maintainability. 
--- src/typesense/api_call.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index 575a936..8c0dd4e 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -325,8 +325,8 @@ def make_request( try: url = node.url() + endpoint - if kwargs.get('data') and not (isinstance(kwargs['data'], str) or isinstance(kwargs['data'], bytes)): - kwargs['data'] = json.dumps(kwargs['data']) + if kwargs.get("data") and not isinstance(kwargs["data"], (str, bytes)): + kwargs["data"] = json.dumps(kwargs["data"]) r = fn(url, headers={ApiCall.API_KEY_HEADER_NAME: self.config.api_key}, **kwargs) From c057ee378d8fae2b7473e24362c327060b6f7768 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Thu, 1 Aug 2024 15:18:11 +0300 Subject: [PATCH 065/288] style(api_call): rename request response variable - Rename `r` variable to resposne for better readability --- src/typesense/api_call.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index 8c0dd4e..79f6a2f 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -328,7 +328,11 @@ def make_request( if kwargs.get("data") and not isinstance(kwargs["data"], (str, bytes)): kwargs["data"] = json.dumps(kwargs["data"]) - r = fn(url, headers={ApiCall.API_KEY_HEADER_NAME: self.config.api_key}, **kwargs) + response = fn( + url, + headers={ApiCall.API_KEY_HEADER_NAME: self.config.api_key}, + **kwargs, + ) # Treat any status code > 0 and < 500 to be an indication that node is healthy # We exclude 0 since some clients return 0 when request fails @@ -344,15 +348,18 @@ def make_request( ) # We should raise a custom exception if status code is not 20X - if r.status_code < 200 or r.status_code >= 300: - content_type = r.headers.get("Content-Type", "") + if response.status_code < 200 or response.status_code >= 300: + content_type = 
response.headers.get("Content-Type", "") error_message = ( - r.json().get("message", "API error.") + response.json().get("message", "API error.") if content_type.startswith("application/json") else "API error." ) # Raised exception will be caught and retried - raise ApiCall.get_exception(r.status_code)(r.status_code, error_message) + raise ApiCall.get_exception(response.status_code)( + response.status_code, + error_message, + ) if as_json: # Have to use type hinting to avoid returning any From c80e39a428d2e9c2ef9b294f494f9fa1234af0f3 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Thu, 1 Aug 2024 15:33:35 +0300 Subject: [PATCH 066/288] refactor(api_call): use keyword arguments for booleans - Remove positional argument setting with boolean values --- src/typesense/api_call.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index 79f6a2f..a4f8077 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -336,7 +336,6 @@ def make_request( # Treat any status code > 0 and < 500 to be an indication that node is healthy # We exclude 0 since some clients return 0 when request fails - self.set_node_healthcheck(node, True) if 0 < response.status_code < 500: logger.debug( "".join( @@ -346,6 +345,7 @@ def make_request( ], ), ) + self.set_node_healthcheck(node, is_healthy=True) # We should raise a custom exception if status code is not 20X if response.status_code < 200 or response.status_code >= 300: @@ -377,7 +377,6 @@ def make_request( exceptions.ServiceUnavailable, ) as e: # Catch the exception and retry - self.set_node_healthcheck(node, False) self.set_node_healthcheck(node, is_healthy=False) logger.debug( " ".join( @@ -725,7 +724,7 @@ def put( return self.make_request( session.put, endpoint, - True, + as_json=True, params=params, data=body, timeout=self.config.connection_timeout_seconds, @@ -773,7 +772,7 @@ def patch( return self.make_request( session.patch, endpoint, - 
True, + as_json=True, params=params, data=body, timeout=self.config.connection_timeout_seconds, @@ -815,7 +814,7 @@ def delete(self, endpoint: str, params: TParams | None = None) -> TEntityDict: return self.make_request( session.delete, endpoint, - True, + as_json=True, params=params, timeout=self.config.connection_timeout_seconds, verify=self.config.verify, From d16ac74373ecd31ed11e0642a06702b715e53963 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Thu, 1 Aug 2024 15:40:50 +0300 Subject: [PATCH 067/288] chore(api_call): use absolute imports --- src/typesense/api_call.py | 45 +++++++++++++++++++++++++-------------- 1 file changed, 29 insertions(+), 16 deletions(-) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index a4f8077..361632a 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -40,15 +40,28 @@ import time import requests + from typesense.configuration import Configuration, Node -from .logger import logger if sys.version_info >= (3, 11): import typing else: import typing_extensions as typing -import typesense.exceptions as exceptions +from typesense.exceptions import ( + HTTPStatus0Error, + ObjectAlreadyExists, + ObjectNotFound, + ObjectUnprocessable, + RequestForbidden, + RequestMalformed, + RequestUnauthorized, + ServerError, + ServiceUnavailable, + TypesenseClientError, +) +from typesense.logger import logger + session = requests.sessions.Session() TParams = typing.TypeVar("TParams", bound=typing.Dict[str, typing.Any]) TBody = typing.TypeVar("TBody", bound=typing.Dict[str, typing.Any]) @@ -154,7 +167,7 @@ def get_node(self) -> Node: return self.nodes[self.node_index] @staticmethod - def get_exception(http_code: int) -> type[exceptions.TypesenseClientError]: + def get_exception(http_code: int) -> type[TypesenseClientError]: """ Return the exception class for a given HTTP status code. 
@@ -165,25 +178,25 @@ def get_exception(http_code: int) -> type[exceptions.TypesenseClientError]: Type[TypesenseClientError]: The exception class for the given HTTP status code. """ if http_code == 0: - return exceptions.HTTPStatus0Error + return HTTPStatus0Error elif http_code == 400: - return exceptions.RequestMalformed + return RequestMalformed elif http_code == 401: - return exceptions.RequestUnauthorized + return RequestUnauthorized elif http_code == 403: - return exceptions.RequestForbidden + return RequestForbidden elif http_code == 404: - return exceptions.ObjectNotFound + return ObjectNotFound elif http_code == 409: - return exceptions.ObjectAlreadyExists + return ObjectAlreadyExists elif http_code == 422: - return exceptions.ObjectUnprocessable + return ObjectUnprocessable elif http_code == 500: - return exceptions.ServerError + return ServerError elif http_code == 503: - return exceptions.ServiceUnavailable + return ServiceUnavailable else: - return exceptions.TypesenseClientError + return TypesenseClientError @typing.overload def make_request( @@ -372,9 +385,9 @@ def make_request( requests.exceptions.HTTPError, requests.exceptions.RequestException, requests.exceptions.SSLError, - exceptions.HTTPStatus0Error, - exceptions.ServerError, - exceptions.ServiceUnavailable, + HTTPStatus0Error, + ServerError, + ServiceUnavailable, ) as e: # Catch the exception and retry self.set_node_healthcheck(node, is_healthy=False) From 6b81cddde8e4bcff63ad90fd11641dfef0493118 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Thu, 1 Aug 2024 15:42:38 +0300 Subject: [PATCH 068/288] refactor(api_call): remove uneeded else statement - Since it already returned early, the logging doesn't need to be broken down to a else statement --- src/typesense/api_call.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index 361632a..efff17a 100644 --- a/src/typesense/api_call.py +++ 
b/src/typesense/api_call.py @@ -149,12 +149,18 @@ def get_node(self) -> Node: if self.config.nearest_node.healthy or self.node_due_for_health_check(self.config.nearest_node): logger.debug('Using nearest node.') return self.config.nearest_node - else: - logger.debug('Nearest node is unhealthy or not due for health check. Falling back to individual nodes.') i = 0 while i < len(self.nodes): i += 1 + logger.debug( + " ".join( + [ + "Nearest node is unhealthy or not due for health check.", + "Falling back to individual nodes.", + ], + ), + ) node = self.nodes[self.node_index] self.node_index = (self.node_index + 1) % len(self.nodes) From f572ad0f917e822281d697b8e5a901c9fba9a14c Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Thu, 1 Aug 2024 15:43:26 +0300 Subject: [PATCH 069/288] style(api_call): rename iteration index variable for readability --- src/typesense/api_call.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index efff17a..254ee67 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -150,9 +150,6 @@ def get_node(self) -> Node: logger.debug('Using nearest node.') return self.config.nearest_node - i = 0 - while i < len(self.nodes): - i += 1 logger.debug( " ".join( [ @@ -161,6 +158,10 @@ def get_node(self) -> Node: ], ), ) + + node_index = 0 + while node_index < len(self.nodes): + node_index += 1 node = self.nodes[self.node_index] self.node_index = (self.node_index + 1) % len(self.nodes) From e4a22b412816961b341767dbde54d174e7b4092d Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Thu, 1 Aug 2024 15:44:30 +0300 Subject: [PATCH 070/288] style(api_call): format api call module to match styling guide --- src/typesense/api_call.py | 36 +++++++++++++++++++++--------------- 1 file changed, 21 insertions(+), 15 deletions(-) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index 254ee67..36790e5 100644 --- a/src/typesense/api_call.py +++ 
b/src/typesense/api_call.py @@ -83,7 +83,6 @@ class SessionFunctionKwargs(typing.Generic[TParams, TBody], typing.TypedDict): verify: bool - API_KEY_HEADER_NAME = 'X-TYPESENSE-API-KEY' class ApiCall(typing.Generic[TEntityDict, TParams, TBody]): """Handles API calls to Typesense with retry and node selection logic. @@ -107,6 +106,8 @@ class ApiCall(typing.Generic[TEntityDict, TParams, TBody]): delete: Performs a DELETE request. """ + API_KEY_HEADER_NAME = "X-TYPESENSE-API-KEY" + def __init__(self, config: Configuration): """Initializes the ApiCall instance with the given configuration. @@ -118,16 +119,11 @@ def __init__(self, config: Configuration): self.node_index = 0 self._initialize_nodes() - def _initialize_nodes(self) -> None: - if self.config.nearest_node: - self.set_node_healthcheck(self.config.nearest_node, True) - - for node in self.nodes: - self.set_node_healthcheck(node, True) - def node_due_for_health_check(self, node: Node) -> bool: current_epoch_ts = int(time.time()) - due_for_check: bool = (current_epoch_ts - node.last_access_ts) > self.config.healthcheck_interval_seconds + due_for_check: bool = ( + current_epoch_ts - node.last_access_ts + ) > self.config.healthcheck_interval_seconds if due_for_check: logger.debug( f"Node {node.host}:{node.port} is due for health check.", @@ -146,8 +142,10 @@ def get_node(self) -> Node: Node: The healthy host from the pool in a round-robin fashion. 
""" if self.config.nearest_node: - if self.config.nearest_node.healthy or self.node_due_for_health_check(self.config.nearest_node): - logger.debug('Using nearest node.') + if self.config.nearest_node.healthy or self.node_due_for_health_check( + self.config.nearest_node, + ): + logger.debug("Using nearest node.") return self.config.nearest_node logger.debug( @@ -168,9 +166,10 @@ def get_node(self) -> Node: if node.healthy or self.node_due_for_health_check(node): return node - # None of the nodes are marked healthy, but some of them could have become healthy since last health check. + # None of the nodes are marked healthy, + # but some of them could have become healthy since last health check. # So we will just return the next node. - logger.debug('No healthy nodes were found. Returning the next node.') + logger.debug("No healthy nodes were found. Returning the next node.") return self.nodes[self.node_index] @staticmethod @@ -436,9 +435,9 @@ def normalize_params(params: TParams) -> None: """ for key in params.keys(): if isinstance(params[key], bool) and params[key]: - params[key] = 'true' + params[key] = "true" elif isinstance(params[key], bool) and not params[key]: - params[key] = 'false' + params[key] = "false" @typing.overload def get( @@ -839,3 +838,10 @@ def delete(self, endpoint: str, params: TParams | None = None) -> TEntityDict: timeout=self.config.connection_timeout_seconds, verify=self.config.verify, ) + + def _initialize_nodes(self) -> None: + if self.config.nearest_node: + self.set_node_healthcheck(self.config.nearest_node, is_healthy=True) + + for node in self.nodes: + self.set_node_healthcheck(node, is_healthy=True) From 546a031296a44f96f6d55be7c9551bcd2175ec4a Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Thu, 1 Aug 2024 15:57:54 +0300 Subject: [PATCH 071/288] feat(collection): add types for collection fields - Introduce `CollectionFieldSchema`, `RegularCollectionFieldSchema`, and `ReferenceCollectionFieldSchema` classes - Define detailed 
attributes for each schema class, including type, facet, optional, infix, stem, locale, sort, store, num_dim, range_index, index, and vec_dist - Enhance code readability and structure for collection field schemas --- src/typesense/collection.py | 11 ++ src/typesense/types/__init__.py | 0 src/typesense/types/collection.py | 207 ++++++++++++++++++++++++++++++ 3 files changed, 218 insertions(+) create mode 100644 src/typesense/types/__init__.py create mode 100644 src/typesense/types/collection.py diff --git a/src/typesense/collection.py b/src/typesense/collection.py index 8c1bc99..2aed87c 100644 --- a/src/typesense/collection.py +++ b/src/typesense/collection.py @@ -1,8 +1,19 @@ +from __future__ import annotations + +import sys +from typesense.types.collection import CollectionSchema, CollectionUpdateSchema + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + from .overrides import Overrides from .synonyms import Synonyms from .documents import Documents + class Collection(object): def __init__(self, api_call, name): self.name = name diff --git a/src/typesense/types/__init__.py b/src/typesense/types/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/typesense/types/collection.py b/src/typesense/types/collection.py new file mode 100644 index 0000000..830a2b0 --- /dev/null +++ b/src/typesense/types/collection.py @@ -0,0 +1,207 @@ +"""Collection types for Typesense Python Client.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +_TType = typing.TypeVar("_TType") + +_FieldType = typing.Literal[ + "string", + "int32", + "int64", + "float", + "bool", + "geopoint", + "geopoint[]", + "string[]", + "int32[]", + "int64[]", + "float[]", + "bool[]", + "object", + "object[]", + "auto", + "string*", + "image", +] + +_ReferenceFieldType = typing.Literal["string", "int32", "int64", "float"] + +_Locales = typing.Literal["ja", "zh", "ko", "th", "el", 
"ru", "rs", "uk", "be", ""] + + +class CollectionFieldSchema(typing.Generic[_TType], typing.TypedDict, total=False): + """ + CollectionFieldSchema represents the schema of a field in a collection. + + Attributes: + name (str): The name of the field. + type (TType): The type of the field. + facet (bool): Whether the field is a facet. + optional (bool): Whether the field is optional. + infix (bool): Whether the field is an infix. + stem (bool): Whether the field is a stem. + locale (Locales): The locale of the field. + sort (bool): Whether the field is sortable. + store (bool): Whether the field is stored. + num_dim (float): The number of dimensions. + range_index (bool): Whether the field is a range index. + index (bool): Whether the field is indexed. + vec_dist (typing.Literal['cosine', 'ip'] | str): The vector distance. + """ + + name: str + type: typing.NotRequired[_TType] + facet: typing.NotRequired[bool] + optional: typing.NotRequired[bool] + infix: typing.NotRequired[bool] + stem: typing.NotRequired[bool] + locale: typing.NotRequired[_Locales] + sort: typing.NotRequired[bool] + store: typing.NotRequired[bool] + num_dim: typing.NotRequired[float] + range_index: typing.NotRequired[bool] + index: typing.NotRequired[bool] + vec_dist: typing.NotRequired[typing.Literal["cosine", "ip"] | str] + + +class RegularCollectionFieldSchema(CollectionFieldSchema[_FieldType]): + """ + The schema of a regular field in a collection. + + Attributes: + name (str): The name of the field. + type (FieldType): The type of the field. + facet (bool): Whether the field is a facet. + optional (bool): Whether the field is optional. + infix (bool): Whether the field is an infix. + stem (bool): Whether the field is a stem. + locale (Locales): The locale of the field. + sort (bool): Whether the field is sortable. + store (bool): Whether the field is stored. + num_dim (float): The number of dimensions. + range_index (bool): Whether the field is a range index. 
+ index (bool): Whether the field is indexed. + vec_dist (typing.Literal['cosine', 'ip'] | str): The vector distance. + """ + + +class ReferenceCollectionFieldSchema(CollectionFieldSchema[_ReferenceFieldType]): + """ + The schema of a field referencing another field from a foreign Collection. + + Attributes: + name (str): The name of the field. + type (ReferenceFieldType): The type of the field. + facet (bool): Whether the field is a facet. + optional (bool): Whether the field is optional. + infix (bool): Whether the field is an infix. + stem (bool): Whether the field is a stem. + locale (Locales): The locale of the field. + sort (bool): Whether the field is sortable. + store (bool): Whether the field is stored. + num_dim (float): The number of dimensions. + range_index (bool): Whether the field is a range index. + index (bool): Whether the field is indexed. + vec_dist (typing.Literal['cosine', 'ip'] | str): The vector distance.:w + """ + + reference: str + + +class DropCollectionFieldSchema(typing.TypedDict): + """The schema for the field in the CollectionUpdateSchema.""" + + drop: typing.Literal[True] + name: str + + +class VoiceQueryModelSchema(typing.TypedDict): + """The schema for the voice_query_model field in the CollectionCreateSchema.""" + + model_name: str + + +class CollectionCreateSchema(typing.TypedDict): + """ + The schema for the request of the Collections.create method. + + Attributes: + name (str): The name of the collection. + + fields (list[RegularCollectionFieldSchema | ReferenceCollectionFieldSchema]): The fields + of the collection. + + default_sorting_field (str): The default sorting field of the collection. + + symbols_to_index (list[str]): The symbols to index. + + token_separators (list[str]): The token separators. + + enable_nested_fields (bool): Whether nested fields are enabled. + + voice_query_model (VoiceQueryModelSchema): The voice query model. 
+ """ + + name: str + fields: list[RegularCollectionFieldSchema | ReferenceCollectionFieldSchema] + default_sorting_field: typing.NotRequired[str] + symbols_to_index: typing.NotRequired[list[str]] + token_separators: typing.NotRequired[list[str]] + enable_nested_fields: typing.NotRequired[bool] + voice_query_model: typing.NotRequired[VoiceQueryModelSchema] + + +class CollectionSchema(CollectionCreateSchema): + """ + The schema for the response of the Collections.create method. + + Attributes: + created_at (int): The creation timestamp of the collection. + + num_documents (int): The number of documents in the collection. + + num_memory_shards (int): The number of memory shards in the collection. + + name (str): The name of the collection. + + fields (list[RegularCollectionFieldSchema | ReferenceCollectionFieldSchema]): The fields + of the collection. + + default_sorting_field (str): The default sorting field of the collection. + + symbols_to_index (list[str]): The symbols to index. + + token_separators (list[str]): The token separators. + + enable_nested_fields (bool): Whether nested fields are enabled. + + voice_query_model (VoiceQueryModelSchema): The voice query model. + """ + + created_at: int + num_documents: int + num_memory_shards: int + + +class CollectionUpdateSchema(typing.TypedDict): + """ + The schema for the request of the Collection.update method. + + Attributes: + fields (list): The fields of the collection. 
+ + """ + + fields: list[ + typing.Union[ + RegularCollectionFieldSchema, + ReferenceCollectionFieldSchema, + DropCollectionFieldSchema, + ] + ] From fc26638345ac2baa11607f4a28eb9353497c58ad Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 5 Aug 2024 10:57:12 +0300 Subject: [PATCH 072/288] fix(api_call): remove generic params from api call class - Remove Generic parameters from `ApiCall` class - Use a `entity_type` parameter on calls for setting the return type --- src/typesense/api_call.py | 27 +++++++++- tests/api_call_test.py | 111 ++++++++++++++++++++++++-------------- 2 files changed, 95 insertions(+), 43 deletions(-) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index 36790e5..56c7541 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -66,6 +66,8 @@ TParams = typing.TypeVar("TParams", bound=typing.Dict[str, typing.Any]) TBody = typing.TypeVar("TBody", bound=typing.Dict[str, typing.Any]) TEntityDict = typing.TypeVar("TEntityDict") + + class SessionFunctionKwargs(typing.Generic[TParams, TBody], typing.TypedDict): """ Dictionary of keyword arguments for the session function. @@ -83,7 +85,7 @@ class SessionFunctionKwargs(typing.Generic[TParams, TBody], typing.TypedDict): verify: bool -class ApiCall(typing.Generic[TEntityDict, TParams, TBody]): +class ApiCall: """Handles API calls to Typesense with retry and node selection logic. 
This class manages API requests to Typesense, including node selection, @@ -209,6 +211,7 @@ def make_request( self, fn: typing.Callable[..., requests.models.Response], endpoint: str, + entity_type: type[TEntityDict], as_json: typing.Literal[True], **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], ) -> TEntityDict: @@ -251,6 +254,7 @@ def make_request( self, fn: typing.Callable[..., requests.models.Response], endpoint: str, + entity_type: type[TEntityDict], as_json: typing.Literal[False], **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], ) -> str: @@ -293,6 +297,7 @@ def make_request( self, fn: typing.Callable[..., requests.models.Response], endpoint: str, + entity_type: type[TEntityDict], as_json: bool, **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], ) -> TEntityDict | str: @@ -443,6 +448,7 @@ def normalize_params(params: TParams) -> None: def get( self, endpoint: str, + entity_type: type[TEntityDict], as_json: typing.Literal[False], params: TParams | None = None, ) -> str: @@ -483,6 +489,7 @@ def get( def get( self, endpoint: str, + entity_type: type[TEntityDict], as_json: typing.Literal[True], params: TParams | None = None, ) -> TEntityDict: @@ -522,6 +529,7 @@ def get( def get( self, endpoint: str, + entity_type: type[TEntityDict], as_json: typing.Literal[True] | typing.Literal[False] = True, params: TParams | None = None, ) -> TEntityDict | str: @@ -560,6 +568,7 @@ def get( return self.make_request( session.get, endpoint, + entity_type, as_json=as_json, params=params, timeout=self.config.connection_timeout_seconds, @@ -570,6 +579,7 @@ def get( def post( self, endpoint: str, + entity_type: type[TEntityDict], body: TBody, as_json: typing.Literal[False], params: TParams | None = None, @@ -612,6 +622,7 @@ def post( def post( self, endpoint: str, + entity_type: type[TEntityDict], body: TBody, as_json: typing.Literal[True], params: TParams | None = None, @@ -653,6 +664,7 @@ def post( def post( self, endpoint: str, + 
entity_type: type[TEntityDict], body: TBody, as_json: typing.Literal[True, False], params: TParams | None = None, @@ -695,6 +707,7 @@ def post( return self.make_request( session.post, endpoint, + entity_type, as_json=as_json, params=params, data=body, @@ -705,6 +718,7 @@ def post( def put( self, endpoint: str, + entity_type: type[TEntityDict], body: TBody, params: TParams | None = None, ) -> TEntityDict: @@ -743,6 +757,7 @@ def put( return self.make_request( session.put, endpoint, + entity_type, as_json=True, params=params, data=body, @@ -753,6 +768,7 @@ def put( def patch( self, endpoint: str, + entity_type: type[TEntityDict], body: TBody, params: TParams | None = None, ) -> TEntityDict: @@ -791,6 +807,7 @@ def patch( return self.make_request( session.patch, endpoint, + entity_type, as_json=True, params=params, data=body, @@ -798,7 +815,12 @@ def patch( verify=self.config.verify, ) - def delete(self, endpoint: str, params: TParams | None = None) -> TEntityDict: + def delete( + self, + endpoint: str, + entity_type: type[TEntityDict], + params: TParams | None = None, + ) -> TEntityDict: """ Make a DELETE request to the endpoint with the given parameters. 
@@ -833,6 +855,7 @@ def delete(self, endpoint: str, params: TParams | None = None) -> TEntityDict: return self.make_request( session.delete, endpoint, + entity_type, as_json=True, params=params, timeout=self.config.connection_timeout_seconds, diff --git a/tests/api_call_test.py b/tests/api_call_test.py index 04c3cd1..481fe1b 100644 --- a/tests/api_call_test.py +++ b/tests/api_call_test.py @@ -56,13 +56,13 @@ def config_fixture() -> Configuration: @pytest.fixture(scope="function", name="api_call") def api_call_fixture( config: Configuration, -) -> ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]]: +) -> ApiCall: """Return an ApiCall object with test values.""" - return ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]](config) + return ApiCall(config) def test_initialization( - api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], + api_call: ApiCall, config: Configuration, ) -> None: """Test the initialization of the ApiCall object.""" @@ -72,7 +72,7 @@ def test_initialization( def test_node_due_for_health_check( - api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], + api_call: ApiCall, ) -> None: """Test that it correctly identifies if a node is due for health check.""" node = Node(host="localhost", port=8108, protocol="http", path=" ") @@ -81,7 +81,7 @@ def test_node_due_for_health_check( def test_get_node_nearest_healthy( - api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], + api_call: ApiCall, ) -> None: """Test that it correctly selects the nearest node if it is healthy.""" node = api_call.get_node() @@ -89,7 +89,7 @@ def test_get_node_nearest_healthy( def test_get_node_nearest_not_healthy( - api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], + api_call: ApiCall, ) -> None: """Test that it selects the next available node if the nearest node is not healthy.""" api_call.config.nearest_node.healthy = False @@ -98,7 +98,7 @@ def test_get_node_nearest_not_healthy( def 
test_get_node_round_robin_selection( - api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], + api_call: ApiCall, mocker: MockerFixture, ) -> None: """Test that it selects the next available node in a round-robin fashion.""" @@ -174,7 +174,9 @@ def test_make_request_as_json(api_call: ApiCall) -> None: status_code=200, ) - response = api_call.make_request(session.get, "/test", as_json=True) + response = api_call.make_request( + session.get, "/test", as_json=True, entity_type=dict[str, str] + ) assert response == {"key": "value"} @@ -189,12 +191,14 @@ def test_make_request_as_text(api_call: ApiCall) -> None: status_code=200, ) - response = api_call.make_request(session.get, "/test", as_json=False) + response = api_call.make_request( + session.get, "/test", as_json=False, entity_type=dict[str, str] + ) assert response == "response text" def test_get_as_json( - api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], + api_call: ApiCall, ) -> None: """Test the GET method with JSON response.""" with requests_mock.mock() as request_mocker: @@ -203,11 +207,13 @@ def test_get_as_json( json={"key": "value"}, status_code=200, ) - assert api_call.get("/test", as_json=True) == {"key": "value"} + assert api_call.get("/test", as_json=True, entity_type=dict[str, str]) == { + "key": "value" + } def test_get_as_text( - api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], + api_call: ApiCall, ) -> None: """Test the GET method with text response.""" with requests_mock.mock() as request_mocker: @@ -216,11 +222,14 @@ def test_get_as_text( text="response text", status_code=200, ) - assert api_call.get("/test", as_json=False) == "response text" + assert ( + api_call.get("/test", as_json=False, entity_type=dict[str, str]) + == "response text" + ) def test_post_as_json( - api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], + api_call: ApiCall, ) -> None: """Test the POST method with JSON response.""" with requests_mock.mock() as 
request_mocker: @@ -229,13 +238,15 @@ def test_post_as_json( json={"key": "value"}, status_code=200, ) - assert api_call.post("/test", body={"data": "value"}, as_json=True) == { + assert api_call.post( + "/test", body={"data": "value"}, as_json=True, entity_type=dict[str, str] + ) == { "key": "value", } def test_post_with_params( - api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], + api_call: ApiCall, ) -> None: """Test that the parameters are correctly passed to the request.""" with requests_mock.Mocker() as request_mocker: @@ -252,6 +263,7 @@ def test_post_with_params( params=parameter_set, body={"key": "value"}, as_json=True, + entity_type=dict[str, str], ) expected_parameter_set = { @@ -267,7 +279,7 @@ def test_post_with_params( def test_post_as_text( - api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], + api_call: ApiCall, ) -> None: """Test the POST method with text response.""" with requests_mock.mock() as request_mocker: @@ -276,12 +288,17 @@ def test_post_as_text( text="response text", status_code=200, ) - post_result = api_call.post("/test", body={"data": "value"}, as_json=False) + post_result = api_call.post( + "/test", + body={"data": "value"}, + as_json=False, + entity_type=dict[str, str], + ) assert post_result == "response text" def test_put_as_json( - api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], + api_call: ApiCall, ) -> None: """Test the PUT method with JSON response.""" with requests_mock.mock() as request_mocker: @@ -290,11 +307,15 @@ def test_put_as_json( json={"key": "value"}, status_code=200, ) - assert api_call.put("/test", body={"data": "value"}) == {"key": "value"} + assert api_call.put( + "/test", + body={"data": "value"}, + entity_type=dict[str, str], + ) == {"key": "value"} def test_patch_as_json( - api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], + api_call: ApiCall, ) -> None: """Test the PATCH method with JSON response.""" with requests_mock.mock() as 
request_mocker: @@ -303,11 +324,15 @@ def test_patch_as_json( json={"key": "value"}, status_code=200, ) - assert api_call.patch("/test", body={"data": "value"}) == {"key": "value"} + assert api_call.patch( + "/test", + body={"data": "value"}, + entity_type=dict[str, str], + ) == {"key": "value"} def test_delete_as_json( - api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], + api_call: ApiCall, ) -> None: """Test the DELETE method with JSON response.""" with requests_mock.mock() as request_mocker: @@ -317,12 +342,12 @@ def test_delete_as_json( status_code=200, ) - response = api_call.delete("/test") + response = api_call.delete("/test", entity_type=dict[str, str]) assert response == {"key": "value"} def test_raise_custom_exception_with_header( - api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], + api_call: ApiCall, ) -> None: """Test that it raises a custom exception with the error message.""" with requests_mock.mock() as request_mocker: @@ -334,12 +359,14 @@ def test_raise_custom_exception_with_header( ) with pytest.raises(exceptions.RequestMalformed) as exception: - api_call.make_request(requests.get, "/test", as_json=True) + api_call.make_request( + requests.get, "/test", as_json=True, entity_type=dict[str, str] + ) assert str(exception.value) == "[Errno 400] Test error" def test_raise_custom_exception_without_header( - api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], + api_call: ApiCall, ) -> None: """Test that it raises a custom exception with the error message.""" with requests_mock.mock() as request_mocker: @@ -350,12 +377,14 @@ def test_raise_custom_exception_without_header( ) with pytest.raises(exceptions.RequestMalformed) as exception: - api_call.make_request(requests.get, "/test", as_json=True) + api_call.make_request( + requests.get, "/test", as_json=True, entity_type=dict[str, str] + ) assert str(exception.value) == "[Errno 400] API error." 
def test_selects_next_available_node_on_timeout( - api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], + api_call: ApiCall, ) -> None: """Test that it selects the next available node if the request times out.""" with requests_mock.mock() as request_mocker: @@ -374,7 +403,7 @@ def test_selects_next_available_node_on_timeout( status_code=200, ) - response = api_call.get("/test", as_json=True) + response = api_call.get("/test", as_json=True, entity_type=dict[str, str]) assert response == {"key": "value"} assert request_mocker.request_history[0].url == "http://node0:8108/test" @@ -384,7 +413,7 @@ def test_selects_next_available_node_on_timeout( def test_raises_if_no_nodes_are_healthy_with_the_last_exception( - api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], + api_call: ApiCall, ) -> None: """Test that it raises the last exception if no nodes are healthy.""" with requests_mock.mock() as request_mocker: @@ -397,12 +426,12 @@ def test_raises_if_no_nodes_are_healthy_with_the_last_exception( request_mocker.get("http://node2:8108/", exc=requests.exceptions.SSLError) with pytest.raises(requests.exceptions.SSLError): - api_call.get("/") + api_call.get('/', entity_type=dict[str, str]) def test_uses_nearest_node_if_present_and_healthy( mocker: MockerFixture, - api_call: ApiCall[Dict[str, str], Dict[str, str], Dict[str, str]], + api_call: ApiCall, ) -> None: """Test that it uses the nearest node if it is present and healthy.""" with requests_mock.Mocker() as request_mocker: @@ -428,15 +457,15 @@ def test_uses_nearest_node_if_present_and_healthy( # 2 should go to node0, # 3 should go to node1, # 4 should go to node2 and resolve the request: 4 requests - api_call.get("/") + api_call.get('/', entity_type=dict[str, str]) # 1 should go to node2 and resolve the request: 1 request - api_call.get("/") + api_call.get('/', entity_type=dict[str, str]) # 1 should go to 
node2 and resolve the request: 1 request - api_call.get("/") + api_call.get('/', entity_type=dict[str, str]) # Advance time by 5 seconds mocker.patch("time.time", return_value=current_time + 5) - api_call.get("/") # 1 should go to node2 and resolve the request: 1 request + api_call.get('/', entity_type=dict[str, str]) # 1 should go to node2 and resolve the request: 1 request # Advance time by 65 seconds mocker.patch("time.time", return_value=current_time + 65) @@ -445,7 +474,7 @@ def test_uses_nearest_node_if_present_and_healthy( # 2 should go to node0, # 3 should go to node1, # 4 should go to node2 and resolve the request: 4 requests - api_call.get("/") + api_call.get('/', entity_type=dict[str, str]) # Advance time by 185 seconds mocker.patch("time.time", return_value=current_time + 185) @@ -458,11 +487,11 @@ def test_uses_nearest_node_if_present_and_healthy( ) # 1 should go to nearest and resolve the request: 1 request - api_call.get("/") + api_call.get('/', entity_type=dict[str, str]) # 1 should go to nearest and resolve the request: 1 request - api_call.get("/") + api_call.get('/', entity_type=dict[str, str]) # 1 should go to nearest and resolve the request: 1 request - api_call.get("/") + api_call.get('/', entity_type=dict[str, str]) # Check the request history assert request_mocker.request_history[0].url == "http://nearest:8108/" From f916eda10a47d0b95b9ffd45c9ca5ccf4f0195a8 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 5 Aug 2024 11:01:42 +0300 Subject: [PATCH 073/288] feat(collections): add type hints for collections class - Update Collections class methods with proper type annotations and return types. - Improve __getitem__ implementation. 
--- src/typesense/collections.py | 48 +++++++++++++++++++++++++++--------- 1 file changed, 37 insertions(+), 11 deletions(-) diff --git a/src/typesense/collections.py b/src/typesense/collections.py index 4f98eb8..35a2b68 100644 --- a/src/typesense/collections.py +++ b/src/typesense/collections.py @@ -1,21 +1,47 @@ +from __future__ import annotations + +import sys +from email.policy import default +from typing import TYPE_CHECKING + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from typesense.api_call import ApiCall +from typesense.types.collection import CollectionCreateSchema, CollectionSchema + from .collection import Collection class Collections(object): RESOURCE_PATH = '/collections' - def __init__(self, api_call): + def __init__(self, api_call: ApiCall): self.api_call = api_call - self.collections = {} - - def __getitem__(self, collection_name): - if collection_name not in self.collections: - self.collections[collection_name] = Collection(self.api_call, collection_name) + self.collections: dict[str, Collection] = {} - return self.collections.get(collection_name) + def __getitem__(self, collection_name: str) -> Collection: + if not self.collections.get(collection_name): + self.collections[collection_name] = Collection( + self.api_call, collection_name + ) + return self.collections[collection_name] - def create(self, schema): - return self.api_call.post(Collections.RESOURCE_PATH, schema) + def create(self, schema: CollectionCreateSchema) -> CollectionSchema: + call: CollectionSchema = self.api_call.post( + endpoint=Collections.RESOURCE_PATH, + entity_type=CollectionSchema, + as_json=True, + body=schema, + ) + return call - def retrieve(self): - return self.api_call.get('{0}'.format(Collections.RESOURCE_PATH)) + def retrieve(self) -> list[CollectionSchema]: + call: list[CollectionSchema] = self.api_call.get( + endpoint=Collections.RESOURCE_PATH, + as_json=True, + entity_type=list[CollectionSchema], + ) + return 
call From 1f2d4027bfedde5ab0b7352b95e5d2eea06cb0ae Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 5 Aug 2024 11:03:18 +0300 Subject: [PATCH 074/288] refactor(test-utils): support dicts for object matching - Refactor assert_to_contain_object for dict support - Update function to handle both object and dictionary inputs. - Replace __dict__ attribute access with obj_to_dict conversion for more flexible comparison of actual and expected values. --- tests/utils/object_assertions.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/tests/utils/object_assertions.py b/tests/utils/object_assertions.py index e8195a2..06ae183 100644 --- a/tests/utils/object_assertions.py +++ b/tests/utils/object_assertions.py @@ -45,19 +45,15 @@ def assert_match_object(actual: TObj, expected: TObj | dict[str, Any]) -> None: raise_with_diff([{key: expected_attrs[key]}], [{key: actual_attrs[key]}]) -def assert_to_contain_object(actual: TObj, expected: TObj | dict[str, Any]) -> None: +def assert_to_contain_object( + actual: TObj | dict[str, Any], expected: TObj | dict[str, Any] +) -> None: """Assert that two objects have the same attribute values.""" - actual_attrs = actual.__dict__ + actual_attrs = obj_to_dict(actual) - if isinstance(expected, dict): - expected_attrs = expected - else: - expected_attrs = expected.__dict__ + expected_attrs = obj_to_dict(expected) for key, _ in expected_attrs.items(): - if not isinstance(key, str): - continue - assert key in actual_attrs, f"Attribute {key} not found in expected object" if actual_attrs[key] != expected_attrs[key]: From 9dc0240e475d8c3832d8251fbf0d2d0a70af102c Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 5 Aug 2024 11:09:47 +0300 Subject: [PATCH 075/288] feat(test-utils): add testing fixtures for pytest - Introduce fixtures for creating and managing Typesense collections, overrides, and synonyms. 
- Include both actual and fake configurations for various Typesense components to facilitate comprehensive testing. --- tests/conftest.py | 195 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 195 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index bceef17..467209b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,200 @@ """Pytest configuration file.""" import pytest +import requests + +from typesense.api_call import ApiCall +from typesense.collection import Collection +from typesense.collections import Collections +from typesense.configuration import Configuration +from typesense.override import Override +from typesense.overrides import Overrides +from typesense.synonym import Synonym +from typesense.synonyms import Synonyms pytest.register_assert_rewrite("utils.object_assertions") + + +@pytest.fixture(scope="function", name="delete_all") +def clear_typesense_collections() -> None: + """Remove all collections from the Typesense server.""" + url = "http://localhost:8108/collections" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + # Get the list of collections + response = requests.get(url, headers=headers) + response.raise_for_status() + collections = response.json() + + # Delete each collection + for collection in collections: + collection_name = collection["name"] + delete_url = f"{url}/{collection_name}" + delete_response = requests.delete(delete_url, headers=headers) + delete_response.raise_for_status() + + +@pytest.fixture(scope="function", name="create_collection") +def create_collection_fixture() -> None: + """Create a collection in the Typesense server.""" + url = "http://localhost:8108/collections" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + data = { + "name": "companies", + "fields": [ + { + "name": "company_name", + "type": "string", + }, + { + "name": "num_employees", + "type": "int32", + }, + ], + 
"default_sorting_field": "num_employees", + } + + response = requests.post(url, headers=headers, json=data) + response.raise_for_status() + + +@pytest.fixture(scope="function", name="create_override") +def create_override_fixture(create_collection: None) -> None: + url = "http://localhost:8108/collections/companies/overrides/company_override" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + data = { + "rule": {"match": "exact", "query": "companies"}, + "filter_by": "num_employees>10", + } + + response = requests.put(url, headers=headers, json=data) + response.raise_for_status() + + +@pytest.fixture(scope="function", name="create_synonym") +def create_synonym_fixture(create_collection: None) -> None: + url = "http://localhost:8108/collections/companies/synonyms/company_synonym" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + data = { + "synonyms": ["companies", "corporations", "firms"], + } + + response = requests.put(url, headers=headers, json=data) + response.raise_for_status() + + +@pytest.fixture(scope="function", name="actual_config") +def actual_config_fixture() -> Configuration: + return Configuration( + config_dict={ + "api_key": "xyz", + "nodes": [ + { + "host": "localhost", + "port": 8108, + "protocol": "http", + } + ], + } + ) + + +@pytest.fixture(scope="function", name="actual_api_call") +def actual_api_call_fixture(actual_config: Configuration) -> ApiCall: + return ApiCall(actual_config) + + +@pytest.fixture(scope="function", name="actual_collections") +def actual_collections_fixture(actual_api_call: ApiCall) -> Collections: + return Collections(actual_api_call) + + +@pytest.fixture(scope="function", name="actual_overrides") +def actual_overrides_fixture(actual_api_call: ApiCall) -> Overrides: + return Overrides(actual_api_call, "companies") + + +@pytest.fixture(scope="function", name="actual_synonyms") +def actual_synonyms_fixture(actual_api_call: ApiCall) -> Synonyms: + 
return Synonyms(actual_api_call, "companies") + + +@pytest.fixture(scope="function", name="fake_config") +def fake_config_fixture() -> Configuration: + """Return a Configuration object with test values.""" + return Configuration( + config_dict={ + "api_key": "test-api-key", + "nodes": [ + { + "host": "node0", + "port": 8108, + "protocol": "http", + }, + { + "host": "node1", + "port": 8108, + "protocol": "http", + }, + { + "host": "node2", + "port": 8108, + "protocol": "http", + }, + ], + "nearest_node": { + "host": "nearest", + "port": 8108, + "protocol": "http", + }, + "num_retries": 3, + "healthcheck_interval_seconds": 60, + "retry_interval_seconds": 0.001, + "connection_timeout_seconds": 0.001, + "verify": True, + }, + ) + + +@pytest.fixture(scope="function", name="fake_api_call") +def fake_api_call_fixture( + fake_config: Configuration, +) -> ApiCall: + """Return an ApiCall object with test values.""" + return ApiCall(fake_config) + + +@pytest.fixture(scope="function", name="fake_collections") +def fake_collections_fixture(fake_api_call: ApiCall) -> Collections: + """Return a Collection object with test values.""" + return Collections(fake_api_call) + + +@pytest.fixture(scope="function", name="fake_collection") +def fake_collection_fixture(fake_api_call: ApiCall) -> Collection: + """Return a Collection object with test values.""" + return Collection(fake_api_call, "companies") + + +@pytest.fixture(scope="function", name="fake_overrides") +def fake_overrides_fixture(fake_api_call: ApiCall) -> Overrides: + """Return a Collection object with test values.""" + return Overrides(fake_api_call, "companies") + + +@pytest.fixture(scope="function", name="fake_override") +def fake_override_fixture(fake_api_call: ApiCall) -> Override: + """Return a Collection object with test values.""" + return Override(fake_api_call, "companies", "company_override") + + +@pytest.fixture(scope="function", name="fake_synonyms") +def fake_synonyms_fixture(fake_api_call: ApiCall) -> 
Synonyms: + """Return a Collection object with test values.""" + return Synonyms(fake_api_call, "companies") + + +@pytest.fixture(scope="function", name="fake_synonym") +def fake_override_synonym(fake_api_call: ApiCall) -> Synonym: + """Return a Collection object with test values.""" + return Synonym(fake_api_call, "companies", "company_synonym") From a55197177859fabbe126fee2e96558194fa20c46 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 5 Aug 2024 11:11:00 +0300 Subject: [PATCH 076/288] test(collections): add tests for collections class Implement comprehensive test suite for Collections class, covering initialization, retrieval, creation, and interaction with both mocked and actual Typesense server instances. --- tests/collections_test.py | 277 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 277 insertions(+) create mode 100644 tests/collections_test.py diff --git a/tests/collections_test.py b/tests/collections_test.py new file mode 100644 index 0000000..18ea40d --- /dev/null +++ b/tests/collections_test.py @@ -0,0 +1,277 @@ +"""Tests for the Collections class.""" + +from __future__ import annotations + +import requests_mock + +from tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from typesense.api_call import ApiCall +from typesense.collections import Collections +from typesense.types.collection import CollectionSchema + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Collections object is initialized correctly.""" + collections = Collections(fake_api_call) + + assert_match_object(collections.api_call, fake_api_call) + assert_object_lists_match(collections.api_call.nodes, fake_api_call.nodes) + assert_match_object( + collections.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert not collections.collections + + +def test_get_missing_collection(fake_collections: Collections) -> None: + """Test that the Collections object can get a missing collection.""" + 
collection = fake_collections["companies"] + + assert collection.name == "companies" + assert_match_object(collection.api_call, fake_collections.api_call) + assert_object_lists_match( + collection.api_call.nodes, + fake_collections.api_call.nodes, + ) + assert_match_object( + collection.api_call.config.nearest_node, + fake_collections.api_call.config.nearest_node, + ) + assert collection.overrides.collection_name == "companies" + assert collection._endpoint_path == "/collections/companies" # noqa: WPS437 + + +def test_get_existing_collection(fake_collections: Collections) -> None: + """Test that the Collections object can get an existing collection.""" + collection = fake_collections["companies"] + fetched_collection = fake_collections["companies"] + + assert len(fake_collections.collections) == 1 + + assert collection is fetched_collection + + +def test_retrieve(fake_collections: Collections) -> None: + """Test that the Collections object can retrieve collections.""" + json_response: list[CollectionSchema] = [ + { + "created_at": 1619711487, + "default_sorting_field": "num_employees", + "enable_nested_fields": False, + "fields": [ + { + "name": "company_name", + "type": "string", + }, + { + "name": "num_employees", + "type": "int32", + }, + { + "name": "num_locations", + "type": "int32", + }, + ], + "name": "companies", + "num_documents": 0, + "symbols_to_index": [], + "token_separators": [], + }, + { + "created_at": 1619711488, + "default_sorting_field": "likes", + "enable_nested_fields": False, + "fields": [ + { + "name": "name", + "type": "string", + }, + { + "name": "likes", + "type": "int32", + }, + ], + "name": "posts", + "num_documents": 0, + "symbols_to_index": [], + "token_separators": [], + }, + ] + with requests_mock.Mocker() as mock: + mock.get("http://nearest:8108/collections", json=json_response) + + response = fake_collections.retrieve() + + assert len(response) == 2 + assert response[0]["name"] == 
"companies" + assert response[1]["name"] == "posts" + assert response == json_response + + +def test_create(fake_collections: Collections) -> None: + """Test that the Collections object can create a collection.""" + json_response: CollectionSchema = { + "created_at": 1619711487, + "default_sorting_field": "num_employees", + "enable_nested_fields": False, + "fields": [ + { + "name": "company_name", + "type": "string", + }, + { + "name": "num_employees", + "type": "int32", + }, + ], + "name": "companies", + "num_documents": 0, + "symbols_to_index": [], + "token_separators": [], + } + + with requests_mock.Mocker() as mock: + mock.post( + "http://nearest:8108/collections", + json=json_response, + ) + + fake_collections.create( + { + "name": "companies", + "fields": [ + { + "name": "company_name", + "type": "string", + }, + { + "name": "num_employees", + "type": "int32", + }, + ], + "default_sorting_field": "num_employees", + }, + ) + + assert mock.call_count == 1 + assert mock.called is True + assert mock.last_request.method == "POST" + assert mock.last_request.url == "http://nearest:8108/collections" + assert mock.last_request.json() == { + "name": "companies", + "fields": [ + { + "name": "company_name", + "type": "string", + }, + { + "name": "num_employees", + "type": "int32", + }, + ], + "default_sorting_field": "num_employees", + } + + +def test_actual_create(actual_collections: Collections, delete_all: None) -> None: + """Test that the Collections object can create a collection on Typesense Server.""" + expected: CollectionSchema = { + "default_sorting_field": "", + "enable_nested_fields": False, + "fields": [ + { + "name": "company_name", + "type": "string", + "facet": False, + "index": True, + "optional": False, + "locale": "", + "sort": False, + "infix": False, + "stem": False, + }, + { + "name": "num_employees", + "type": "int32", + "facet": False, + "index": True, + 
"optional": False, + "locale": "", + "sort": False, + "infix": False, + "stem": False, + }, + ], + "name": "companies", + "num_documents": 0, + "symbols_to_index": [], + "token_separators": [], + } + + response = actual_collections.create( + { + "name": "companies", + "fields": [ + { + "name": "company_name", + "type": "string", + }, + { + "name": "num_employees", + "type": "int32", + "sort": False, + }, + ], + }, + ) + + response.pop("created_at") + + assert response == expected + + +def test_actual_retrieve( + actual_collections: Collections, + delete_all: None, + create_collection: None, +) -> None: + """Test that the Collections object can retrieve collections.""" + response = actual_collections.retrieve() + + expected: list[CollectionSchema] = [ + { + "default_sorting_field": "num_employees", + "enable_nested_fields": False, + "fields": [ + { + "name": "company_name", + "type": "string", + "facet": False, + "index": True, + "optional": False, + "locale": "", + "sort": False, + "infix": False, + "stem": False, + }, + { + "name": "num_employees", + "type": "int32", + "facet": False, + "index": True, + "optional": False, + "locale": "", + "sort": True, + "infix": False, + "stem": False, + }, + ], + "name": "companies", + "num_documents": 0, + "symbols_to_index": [], + "token_separators": [], + }, + ] + + response[0].pop("created_at") + assert response == expected From 1d6ebcea5091145ca4440a0da7b550ec4e5359ef Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 5 Aug 2024 11:12:54 +0300 Subject: [PATCH 077/288] feat(collection): add type hints for collection class --- src/typesense/collection.py | 40 +++++++++++++++++++++++++------------ 1 file changed, 27 insertions(+), 13 deletions(-) diff --git a/src/typesense/collection.py b/src/typesense/collection.py index 2aed87c..695cfd1 100644 --- a/src/typesense/collection.py +++ b/src/typesense/collection.py @@ -8,6 +8,7 @@ else: import typing_extensions as typing +from typesense.api_call import ApiCall from 
.overrides import Overrides from .synonyms import Synonyms from .documents import Documents @@ -15,22 +16,35 @@ class Collection(object): - def __init__(self, api_call, name): + def __init__(self, api_call: ApiCall, name: str): self.name = name self.api_call = api_call self.documents = Documents(api_call, name) self.overrides = Overrides(api_call, name) self.synonyms = Synonyms(api_call, name) - def _endpoint_path(self): - from .collections import Collections - return u"{0}/{1}".format(Collections.RESOURCE_PATH, self.name) - - def retrieve(self): - return self.api_call.get(self._endpoint_path()) - - def update(self, schema_change): - return self.api_call.patch(self._endpoint_path(), schema_change) - - def delete(self, params=None): - return self.api_call.delete(self._endpoint_path(), params) + @property + def _endpoint_path(self) -> str: + from typesense.collections import Collections + + return f"{Collections.RESOURCE_PATH}/{self.name}" + + def retrieve(self) -> CollectionSchema: + response: CollectionSchema = self.api_call.get( + endpoint=self._endpoint_path, entity_type=CollectionSchema, as_json=True + ) + return response + + def update(self, schema_change: CollectionUpdateSchema) -> CollectionUpdateSchema: + response: CollectionUpdateSchema = self.api_call.patch( + endpoint=self._endpoint_path, + body=schema_change, + entity_type=CollectionUpdateSchema, + ) + return response + + # There's currently no parameters passed to Collection deletions, but ensuring future compatibility + def delete(self, params: dict[str, str | bool] | None = None) -> CollectionSchema: + return self.api_call.delete( + self._endpoint_path, entity_type=CollectionSchema, params=params + ) From 0ec3b8d3f3c77edb66f986f39e61b728c0f6fdcd Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 5 Aug 2024 11:13:38 +0300 Subject: [PATCH 078/288] test(collection): add tests for collection class Implement comprehensive test suite for `Collection` class, covering initialization, retrieval, 
creation, and interaction with both mocked and actual Typesense server instances. --- tests/collection_test.py | 246 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 246 insertions(+) create mode 100644 tests/collection_test.py diff --git a/tests/collection_test.py b/tests/collection_test.py new file mode 100644 index 0000000..0dbaeb5 --- /dev/null +++ b/tests/collection_test.py @@ -0,0 +1,246 @@ +"""Tests for the Collection class.""" + +from __future__ import annotations + +import time + +import requests_mock + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from typesense.api_call import ApiCall +from typesense.collection import Collection +from typesense.collections import Collections +from typesense.types.collection import CollectionSchema + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Collection object is initialized correctly.""" + collection = Collection(fake_api_call, "companies") + + assert collection.name == "companies" + assert_match_object(collection.api_call, fake_api_call) + assert_object_lists_match(collection.api_call.nodes, fake_api_call.nodes) + assert_match_object( + collection.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert collection.overrides.collection_name == "companies" + assert collection._endpoint_path == "/collections/companies" # noqa: WPS437 + + +def test_retrieve(fake_collection: Collection) -> None: + """Test that the Collection object can retrieve a collection.""" + time_now = int(time.time()) + + json_response: CollectionSchema = { + "created_at": time_now, + "default_sorting_field": "num_employees", + "enable_nested_fields": False, + "fields": [ + { + "name": "company_name", + "type": "string", + }, + { + "name": "num_employees", + "type": "int32", + }, + ], + "name": "companies", + "num_documents": 0, + "symbols_to_index": [], + "token_separators": [], + } + + with requests_mock.mock() as 
mock: + mock.get( + "http://nearest:8108/collections/companies", + json=json_response, + ) + + response = fake_collection.retrieve() + + assert len(mock.request_history) == 1 + assert mock.request_history[0].method == "GET" + assert ( + mock.request_history[0].url == "http://nearest:8108/collections/companies" + ) + + assert response == json_response + + +def test_update(fake_collection: Collection) -> None: + """Test that the Collection object can update a collection.""" + json_response: CollectionSchema = { + "created_at": 1619711487, + "default_sorting_field": "num_employees", + "enable_nested_fields": False, + "fields": [ + { + "name": "company_name", + "type": "string", + }, + { + "name": "num_employees", + "type": "int32", + }, + { + "name": "num_locations", + "type": "int32", + }, + ], + "name": "companies", + "num_documents": 0, + "symbols_to_index": [], + "token_separators": [], + } + + with requests_mock.mock() as mock: + mock.patch( + "http://nearest:8108/collections/companies", + json=json_response, + ) + + response = fake_collection.update( + schema_change={ + "fields": [ + { + "name": "num_locations", + "type": "int32", + }, + ], + }, + ) + + assert mock.call_count == 1 + assert mock.called is True + assert mock.last_request.method == "PATCH" + assert mock.last_request.url == "http://nearest:8108/collections/companies" + assert mock.last_request.json() == { + "fields": [ + { + "name": "num_locations", + "type": "int32", + }, + ], + } + assert response == json_response + + +def test_delete(fake_collection: Collection) -> None: + """Test that the Collection object can delete a collection.""" + json_response: CollectionSchema = { + "created_at": 1619711487, + "default_sorting_field": "num_employees", + "enable_nested_fields": False, + "fields": [ + { + "name": "company_name", + "type": 
"string", + }, + { + "name": "num_employees", + "type": "int32", + }, + { + "name": "num_locations", + "type": "int32", + }, + ], + "name": "companies", + "num_documents": 0, + "symbols_to_index": [], + "token_separators": [], + } + + with requests_mock.mock() as mock: + mock.delete( + "http://nearest:8108/collections/companies", + json=json_response, + ) + + response = fake_collection.delete() + + assert mock.call_count == 1 + assert mock.called is True + assert mock.last_request.method == "DELETE" + assert mock.last_request.url == "http://nearest:8108/collections/companies" + assert response == json_response + + +def test_actual_retrieve( + actual_collections: Collections, + delete_all: None, + create_collection: None, +) -> None: + """Test that the Collection object can retrieve a collection.""" + response = actual_collections["companies"].retrieve() + + expected: CollectionSchema = { + "created_at": int(time.time()), + "default_sorting_field": "num_employees", + "enable_nested_fields": False, + "fields": [ + { + "name": "company_name", + "type": "string", + "facet": False, + "index": True, + "optional": False, + "locale": "", + "sort": False, + "infix": False, + "stem": False, + }, + { + "name": "num_employees", + "type": "int32", + "facet": False, + "index": True, + "optional": False, + "locale": "", + "sort": True, + "infix": False, + "stem": False, + }, + ], + "name": "companies", + "num_documents": 0, + "symbols_to_index": [], + "token_separators": [], + } + + assert response == expected + + +def test_actual_update( + actual_collections: Collections, + delete_all: None, + create_collection: None, +) -> None: + """Test that the Collection object can update a collection.""" + response = actual_collections["companies"].update( + {"fields": [{"name": "num_locations", "type": "int32"}]}, + ) + + expected: CollectionSchema = { + "fields": [ + { + "name": "num_locations", + 
"type": "int32", + "facet": False, + "index": True, + "optional": False, + "locale": "", + "sort": True, + "infix": False, + "stem": False, + }, + ], + } + + assert_to_contain_object(response.get("fields")[0], expected.get("fields")[0]) From 8907baa794692b22b869c0dd3033a115e23afab7 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 5 Aug 2024 11:36:56 +0300 Subject: [PATCH 079/288] feat(override): add types for overrides - Introduce `OverrideQueryRuleSchema`, `OverrideFilterSchema`, and `IncludesSchema` to define request structures for override operations. - Add `OverrideCreateSchema` and `OverrideSchema` for consistent response handling. - Implement `OverrideRetrieveSchema` for retrieving override information. --- src/typesense/types/override.py | 100 ++++++++++++++++++++++++++++++++ 1 file changed, 100 insertions(+) create mode 100644 src/typesense/types/override.py diff --git a/src/typesense/types/override.py b/src/typesense/types/override.py new file mode 100644 index 0000000..8009fb9 --- /dev/null +++ b/src/typesense/types/override.py @@ -0,0 +1,100 @@ +"""Override types for Typesense Python Client.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class OverrideQueryRuleSchema(typing.TypedDict): + """ + The schema for the rule field in the Overrides.upsert method. + + Attributes: + query (str): The query string. + match (typing.Literal['contains', 'exact']): The match type. + filter_by (str): The filter string. + tags (list[str]): The tags list. + """ + + query: str + match: typing.Literal["contains", "exact"] + filter_by: typing.NotRequired[str] + tags: typing.NotRequired[list[str]] + + +class OverrideFilterSchema(typing.TypedDict): + """ + The schema for the rule field in the Overrides.upsert method. + + Attributes: + filter_by (str): The filter string. + tags (list[str]): The tags list. 
+ """ + + filter_by: str + tags: typing.NotRequired[list[str]] + + +class IncludesSchema(typing.TypedDict): + """ + The schema for the includes field in the Overrides.upsert method. + + Attributes: + id (str): The ID of the document. + position (int): The position of the ID in the response. + """ + + id: str + position: int + + +class OverrideCreateSchema(typing.TypedDict): + """ + The schema for the request of the Overrides.upsert method. + + Attributes: + rule (OverrideQueryRuleSchema | OverrideFilterSchema): The rule. + sort_by (str): The sort by string. + filter_by (str): The filter by string. + excludes (list[str]): The excludes list. + replace_query (str): The replace query string. + includes (list[IncludesSchema]): The includes list. + metadata (dict[str, str]): The metadata dictionary. + filter_curated_hits (bool): Whether to filter curated hits. + effective_from_ts (int): The effective from timestamp. + effective_to_ts (int): The effective to timestamp. + stop_processing (bool): Whether to stop processing. 
+ """ + + rule: OverrideQueryRuleSchema | OverrideFilterSchema + sort_by: typing.NotRequired[str] + filter_by: typing.NotRequired[str] + excludes: typing.NotRequired[list[str]] + replace_query: typing.NotRequired[str] + includes: typing.NotRequired[list[IncludesSchema]] + metadata: typing.NotRequired[dict[str, str]] + filter_curated_hits: typing.NotRequired[bool] + effective_from_ts: typing.NotRequired[int] + effective_to_ts: typing.NotRequired[int] + stop_processing: typing.NotRequired[bool] + + +class OverrideSchema(OverrideCreateSchema): + """The schema for the response of the Overrides.upsert method.""" + + id: str + + +class OverrideDeleteSchema(typing.TypedDict): + """The schema for the response of the Overrides.delete method.""" + + id: str + + +class OverrideRetrieveSchema(typing.TypedDict): + """The schema for the response of the Overrides.retrieve method.""" + + overrides: list[OverrideSchema] From 78afd4469535bdfd79e5c283390312ff48261db1 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 5 Aug 2024 11:39:08 +0300 Subject: [PATCH 080/288] feat(overrides): add type hints for overrides class - Add type annotations for methods and attributes in `Overrides` class. - Integrate `OverrideCreateSchema`, `OverrideRetrieveSchema`, and `OverrideSchema` for method parameter and return types. - Modify constructor to use annotated parameters and initialize `overrides` as a typed dictionary. - Refactor `__getitem__` to check existence using `get()` method and return `Override` instance. - Update `_endpoint_path` to use optional type annotation for `override_id`. - Enhance `upsert` method to return `OverrideSchema` and utilize API call with type safety. - Improve `retrieve` method to return `OverrideRetrieveSchema` with appropriate type handling in API call. 
--- src/typesense/overrides.py | 57 ++++++++++++++++++++++++++++---------- 1 file changed, 42 insertions(+), 15 deletions(-) diff --git a/src/typesense/overrides.py b/src/typesense/overrides.py index 2b258ff..a6a7484 100644 --- a/src/typesense/overrides.py +++ b/src/typesense/overrides.py @@ -1,28 +1,55 @@ +from __future__ import annotations + +from typesense.api_call import ApiCall +from typesense.types.override import ( + OverrideCreateSchema, + OverrideRetrieveSchema, + OverrideSchema, +) + from .override import Override class Overrides(object): RESOURCE_PATH = 'overrides' - def __init__(self, api_call, collection_name): + def __init__( + self, + api_call: ApiCall, + collection_name: str, + ) -> None: self.api_call = api_call self.collection_name = collection_name - self.overrides = {} - - def __getitem__(self, override_id): - if override_id not in self.overrides: - self.overrides[override_id] = Override(self.api_call, self.collection_name, override_id) + self.overrides: dict[str, Override] = {} + def __getitem__(self, override_id: str) -> Override: + if not self.overrides.get(override_id): + self.overrides[override_id] = Override( + self.api_call, self.collection_name, override_id + ) return self.overrides[override_id] - def _endpoint_path(self, override_id=None): + def _endpoint_path(self, override_id: str | None = None) -> str: from .collections import Collections - override_id = override_id or '' - return u"{0}/{1}/{2}/{3}".format(Collections.RESOURCE_PATH, self.collection_name, - Overrides.RESOURCE_PATH, override_id) - - def upsert(self, id, schema): - return self.api_call.put(self._endpoint_path(id), schema) - def retrieve(self): - return self.api_call.get(self._endpoint_path()) + override_id = override_id or "" + return "{0}/{1}/{2}/{3}".format( + Collections.RESOURCE_PATH, + self.collection_name, + Overrides.RESOURCE_PATH, + override_id, + ) + + def upsert(self, id: str, schema: OverrideCreateSchema) -> OverrideSchema: + response: OverrideSchema = 
self.api_call.put( + endpoint=self._endpoint_path(id), + entity_type=OverrideSchema, + body=schema, + ) + return response + + def retrieve(self) -> OverrideRetrieveSchema: + response: OverrideRetrieveSchema = self.api_call.get( + self._endpoint_path(), entity_type=OverrideRetrieveSchema, as_json=True + ) + return response From 0ff3db30274166314643e3e056892c1b18a164be Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 5 Aug 2024 11:43:53 +0300 Subject: [PATCH 081/288] test(overrides): add tests for overrides class - Add tests for initialization, retrieval, creation, and updating of overrides. - Utilize `requests_mock` to mock API calls for `upsert` and `retrieve` methods. - Ensure type annotations and schema integration are tested for overrides. - Verify proper endpoint paths and API call configurations. - Include tests for actual Typesense server interactions for overrides. --- tests/overrides_test.py | 187 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 187 insertions(+) create mode 100644 tests/overrides_test.py diff --git a/tests/overrides_test.py b/tests/overrides_test.py new file mode 100644 index 0000000..313963c --- /dev/null +++ b/tests/overrides_test.py @@ -0,0 +1,187 @@ +"""Tests for the Overrides class.""" + +from __future__ import annotations + +import requests_mock + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from typesense.api_call import ApiCall +from typesense.collections import Collections +from typesense.overrides import OverrideRetrieveSchema, Overrides, OverrideSchema + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Overrides object is initialized correctly.""" + overrides = Overrides(fake_api_call, "companies") + + assert_match_object(overrides.api_call, fake_api_call) + assert_object_lists_match(overrides.api_call.nodes, fake_api_call.nodes) + assert_match_object( + overrides.api_call.config.nearest_node, + 
fake_api_call.config.nearest_node, + ) + + assert not overrides.overrides + + +def test_get_missing_override(fake_overrides: Overrides) -> None: + """Test that the Overrides object can get a missing override.""" + override = fake_overrides["company_override"] + + assert override.override_id == "company_override" + assert_match_object(override.api_call, fake_overrides.api_call) + assert_object_lists_match(override.api_call.nodes, fake_overrides.api_call.nodes) + assert_match_object( + override.api_call.config.nearest_node, + fake_overrides.api_call.config.nearest_node, + ) + assert override.collection_name == "companies" + assert ( + override._endpoint_path() # noqa: WPS437 + == "/collections/companies/overrides/company_override" + ) + + +def test_get_existing_override(fake_overrides: Overrides) -> None: + """Test that the Overrides object can get an existing override.""" + override = fake_overrides["companies"] + fetched_override = fake_overrides["companies"] + + assert len(fake_overrides.overrides) == 1 + + assert override is fetched_override + + +def test_retrieve(fake_overrides: Overrides) -> None: + """Test that the Overrides object can retrieve overrides.""" + json_response: OverrideRetrieveSchema = { + "overrides": [ + { + "id": "company_override", + "rule": {"match": "exact", "query": "companies"}, + }, + ], + } + with requests_mock.Mocker() as mock: + mock.get( + "http://nearest:8108/collections/companies/overrides/", + json=json_response, + ) + + response = fake_overrides.retrieve() + + assert len(response) == 1 + assert response["overrides"][0] == { + "id": "company_override", + "rule": {"match": "exact", "query": "companies"}, + } + assert response == json_response + + +def test_create(fake_overrides: Overrides) -> None: + """Test that the Overrides object can create a override.""" + json_response: OverrideSchema = { + "id": "company_override", + "rule": {"match": "exact", "query": "companies"}, + } + + with 
requests_mock.Mocker() as mock: + mock.put( + "http://nearest:8108/collections/companies/overrides/company_override", + json=json_response, + ) + + fake_overrides.upsert( + "company_override", + {"rule": {"match": "exact", "query": "companies"}}, + ) + + assert mock.call_count == 1 + assert mock.called is True + assert mock.last_request.method == "PUT" + assert ( + mock.last_request.url + == "http://nearest:8108/collections/companies/overrides/company_override" + ) + assert mock.last_request.json() == { + "rule": {"match": "exact", "query": "companies"}, + } + + +def test_actual_create( + actual_overrides: Overrides, + delete_all: None, + create_collection: None, +) -> None: + """Test that the Overrides object can create an override on Typesense Server.""" + response = actual_overrides.upsert( + "company_override", + { + "rule": {"match": "exact", "query": "companies"}, + "filter_by": "num_employees>10", + }, + ) + + assert response == { + "id": "company_override", + "rule": {"match": "exact", "query": "companies"}, + "filter_by": "num_employees>10", + } + + +def test_actual_update( + actual_overrides: Overrides, + delete_all: None, + create_collection: None, +) -> None: + """Test that the Overrides object can update an override on Typesense Server.""" + create_response = actual_overrides.upsert( + "company_override", + { + "rule": {"match": "exact", "query": "companies"}, + "filter_by": "num_employees>10", + }, + ) + + assert create_response == { + "id": "company_override", + "rule": {"match": "exact", "query": "companies"}, + "filter_by": "num_employees>10", + } + + update_response = actual_overrides.upsert( + "company_override", + { + "rule": {"match": "contains", "query": "companies"}, + "filter_by": "num_employees>20", + }, + ) + + assert update_response == { + "id": "company_override", + "rule": {"match": "contains", "query": "companies"}, + "filter_by": "num_employees>20", 
+ } + + +def test_actual_retrieve( + delete_all: None, + create_override: None, + actual_collections: Collections, +) -> None: + """Test that the Overrides object can retrieve an override from Typesense Server.""" + response = actual_collections["companies"].overrides.retrieve() + + assert len(response["overrides"]) == 1 + assert_to_contain_object( + response["overrides"][0], + { + "id": "company_override", + "rule": {"match": "exact", "query": "companies"}, + "filter_by": "num_employees>10", + }, + ) From 49def995710b494baadb78f99a1cf48a8f5dd2ce Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 5 Aug 2024 11:47:43 +0300 Subject: [PATCH 082/288] feat(override): add type hints for override class - Refactor `Override` class to include type annotations for `ApiCall`, `OverrideSchema`, and `OverrideDeleteSchema`. - Modify `retrieve` and `delete` methods to utilize the appropriate response schema. - Update `_endpoint_path` method to use consistent import statements. - Ensure all method signatures and return types are properly defined. 
--- src/typesense/override.py | 43 +++++++++++++++++++++++++++++---------- 1 file changed, 32 insertions(+), 11 deletions(-) diff --git a/src/typesense/override.py b/src/typesense/override.py index b66006f..ba2a47d 100644 --- a/src/typesense/override.py +++ b/src/typesense/override.py @@ -1,17 +1,38 @@ -class Override(object): - def __init__(self, api_call, collection_name, override_id): +from __future__ import annotations + +import sys + +from typesense.api_call import ApiCall +from typesense.types.override import OverrideDeleteSchema, OverrideSchema + + +class Override: + def __init__( + self, api_call: ApiCall, collection_name: str, override_id: str + ) -> None: self.api_call = api_call self.collection_name = collection_name self.override_id = override_id - def _endpoint_path(self): - from .overrides import Overrides - from .collections import Collections - return u"{0}/{1}/{2}/{3}".format(Collections.RESOURCE_PATH, self.collection_name, Overrides.RESOURCE_PATH, - self.override_id) + def _endpoint_path(self) -> str: + from typesense.collections import Collections + from typesense.overrides import Overrides + + return "{0}/{1}/{2}/{3}".format( + Collections.RESOURCE_PATH, + self.collection_name, + Overrides.RESOURCE_PATH, + self.override_id, + ) - def retrieve(self): - return self.api_call.get(self._endpoint_path()) + def retrieve(self) -> OverrideSchema: + response: OverrideSchema = self.api_call.get( + self._endpoint_path(), entity_type=OverrideSchema, as_json=True + ) + return response - def delete(self): - return self.api_call.delete(self._endpoint_path()) + def delete(self) -> OverrideDeleteSchema: + response: OverrideDeleteSchema = self.api_call.delete( + self._endpoint_path(), entity_type=OverrideDeleteSchema + ) + return response From 3257f5ad48e3249fb3d8e62aaaf4d29afa5bad18 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 5 Aug 2024 11:51:47 +0300 Subject: [PATCH 083/288] test(override): add tests for override class Add tests for: - 
Initialization of `Override` object - Retrieving an override - Deleting an override - Integration tests for retrieving and deleting overrides from Typesense server --- tests/override_test.py | 118 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 118 insertions(+) create mode 100644 tests/override_test.py diff --git a/tests/override_test.py b/tests/override_test.py new file mode 100644 index 0000000..5cd8fb3 --- /dev/null +++ b/tests/override_test.py @@ -0,0 +1,118 @@ +"""Tests for the Override class.""" + +from __future__ import annotations + +import requests_mock + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from typesense.api_call import ApiCall +from typesense.collections import Collections +from typesense.override import Override, OverrideDeleteSchema +from typesense.types.override import OverrideSchema + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Override object is initialized correctly.""" + override = Override(fake_api_call, "companies", "company_override") + + assert override.collection_name == "companies" + assert override.override_id == "company_override" + assert_match_object(override.api_call, fake_api_call) + assert_object_lists_match(override.api_call.nodes, fake_api_call.nodes) + assert_match_object( + override.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert ( + override._endpoint_path() # noqa: WPS437 + == "/collections/companies/overrides/company_override" + ) + + +def test_retrieve(fake_override: Override) -> None: + """Test that the Override object can retrieve an override.""" + json_response: OverrideSchema = { + "rule": { + "match": "contains", + "query": "companies", + }, + "filter_by": "num_employees>10", + } + + with requests_mock.Mocker() as mock: + mock.get( + "/collections/companies/overrides/company_override", + json=json_response, + ) + + response = fake_override.retrieve() + + assert 
len(mock.request_history) == 1 + assert mock.request_history[0].method == "GET" + assert ( + mock.request_history[0].url + == "http://nearest:8108/collections/companies/overrides/company_override" + ) + assert response == json_response + + +def test_delete(fake_override: Override) -> None: + """Test that the Override object can delete an override.""" + json_response: OverrideDeleteSchema = { + "id": "company_override", + } + with requests_mock.Mocker() as mock: + mock.delete( + "/collections/companies/overrides/company_override", + json=json_response, + ) + + response = fake_override.delete() + + assert len(mock.request_history) == 1 + assert mock.request_history[0].method == "DELETE" + assert ( + mock.request_history[0].url + == "http://nearest:8108/collections/companies/overrides/company_override" + ) + assert response == {"id": "company_override"} + + +def test_actual_retrieve( + actual_collections: Collections, + delete_all: None, + create_override: None, +) -> None: + """Test that the Override object can retrieve an override from Typesense Server.""" + response = actual_collections["companies"].overrides["company_override"].retrieve() + + assert response["rule"] == { + "match": "exact", + "query": "companies", + } + assert response["filter_by"] == "num_employees>10" + assert_to_contain_object( + response, + { + "rule": { + "match": "exact", + "query": "companies", + }, + "filter_by": "num_employees>10", + }, + ) + + +def test_actual_delete( + actual_collections: Collections, + delete_all: None, + create_override: None, +) -> None: + """Test that the Override object can delete an override from Typesense Server.""" + response = actual_collections["companies"].overrides["company_override"].delete() + + assert response == {"id": "company_override"} From ebaac75a8b733b8ab06123de0f1cfc06ae043bc2 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 5 Aug 2024 12:01:47 +0300 
Subject: [PATCH 084/288] feat(synonyms): add types for synonyms - Introduce schemas for creating, updating, deleting and retrieving synonyms. - Introduce the response schemas for all API interactions for synonyms --- src/typesense/types/synonym.py | 71 ++++++++++++++++++++++++++++++++++ 1 file changed, 71 insertions(+) create mode 100644 src/typesense/types/synonym.py diff --git a/src/typesense/types/synonym.py b/src/typesense/types/synonym.py new file mode 100644 index 0000000..5024248 --- /dev/null +++ b/src/typesense/types/synonym.py @@ -0,0 +1,71 @@ +"""Synonym types for Typesense Python Client.""" + +import sys + +from typesense.types.collection import _Locales + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class SynonymCreateSchema(typing.TypedDict): + """ + The schema for the request of the Synonyms.upsert method. + + Attributes: + synonyms (list[str]): The synonyms list. + + root (str): The root string. + + locale (Locales): The locale. + + symbols_to_index (list[str]): The symbols to index. + """ + + synonyms: typing.List[str] + root: typing.NotRequired[str] + locale: typing.NotRequired[_Locales] + symbols_to_index: typing.NotRequired[list[str]] + + +class SynonymSchema(SynonymCreateSchema): + """ + The schema for the response of the Synonyms.upsert method. + + Attributes: + id (str): The ID of the synonym. + + synonyms (list[str]): The synonyms list. + + root (str): The root string. + + locale (Locales): The locale. + + symbols_to_index (list[str]): The symbols to index. + """ + + id: str + + +class SynonymsRetrieveSchema(typing.TypedDict): + """ + The schema for the response of the Synonyms.retrieve method. + + Attributes: + synonyms(list[SynonymSchema]): The list of synonyms. + """ + + synonyms: list[SynonymSchema] + + +class SynonymDeleteSchema(typing.TypedDict): + """ + The schema for the response of the Synonyms.delete method. + + Attributes: + id (str): The ID of the synonym. 
+ """ + + id: str From 3caf4966bb0b47422008b7e724de909350650e35 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 5 Aug 2024 12:04:48 +0300 Subject: [PATCH 085/288] feat(synonyms): add type hints for synonyms class - Annotating types for methods and attributes. - Introducing `SynonymCreateSchema`, `SynonymSchema`, and `SynonymsRetrieveSchema` for better schema management. - Updating method signatures for better clarity and type enforcement. - Ensuring consistent use of schema types in API call responses. --- src/typesense/synonyms.py | 53 +++++++++++++++++++++++++++------------ 1 file changed, 37 insertions(+), 16 deletions(-) diff --git a/src/typesense/synonyms.py b/src/typesense/synonyms.py index 3b6cf28..77b4bc6 100644 --- a/src/typesense/synonyms.py +++ b/src/typesense/synonyms.py @@ -1,28 +1,49 @@ +from typesense.api_call import ApiCall +from typesense.types.synonym import ( + SynonymCreateSchema, + SynonymSchema, + SynonymsRetrieveSchema, +) + from .synonym import Synonym class Synonyms(object): RESOURCE_PATH = 'synonyms' - def __init__(self, api_call, collection_name): + def __init__(self, api_call: ApiCall, collection_name: str): self.api_call = api_call self.collection_name = collection_name - self.synonyms = {} + self.synonyms: dict[str, Synonym] = {} - def __getitem__(self, synonym_id): - if synonym_id not in self.synonyms: - self.synonyms[synonym_id] = Synonym(self.api_call, self.collection_name, synonym_id) + def __getitem__(self, synonym_id: str) -> Synonym: + if not self.synonyms.get(synonym_id): + self.synonyms[synonym_id] = Synonym( + self.api_call, self.collection_name, synonym_id + ) return self.synonyms[synonym_id] - def _endpoint_path(self, synonym_id=None): - from .collections import Collections - synonym_id = synonym_id or '' - return u"{0}/{1}/{2}/{3}".format(Collections.RESOURCE_PATH, self.collection_name, - Synonyms.RESOURCE_PATH, synonym_id) - - def upsert(self, id, schema): - return self.api_call.put(self._endpoint_path(id), 
schema) - - def retrieve(self): - return self.api_call.get(self._endpoint_path()) + def _endpoint_path(self, synonym_id: str | None = None) -> str: + from typesense.collections import Collections + + synonym_id = synonym_id or "" + return "{0}/{1}/{2}/{3}".format( + Collections.RESOURCE_PATH, + self.collection_name, + Synonyms.RESOURCE_PATH, + synonym_id, + ) + + def upsert(self, id: str, schema: SynonymCreateSchema) -> SynonymSchema: + response = self.api_call.put( + self._endpoint_path(id), body=schema, entity_type=SynonymSchema + ) + + return response + + def retrieve(self) -> SynonymsRetrieveSchema: + response = self.api_call.get( + self._endpoint_path(), entity_type=SynonymsRetrieveSchema + ) + return response From 48fdd0b6ebd6b05190501e993e7f2c8c41bc4817 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 5 Aug 2024 12:08:02 +0300 Subject: [PATCH 086/288] test(synonyms): add tests for synonyms class - Implement unit tests for the `Synonyms` class to ensure proper functionality. - Tests include initialization, retrieval of existing and missing synonyms, creation, and updating of synonyms. - Coverage includes both mocked and actual server interactions to validate correct behavior and API integration. 
--- tests/synonyms_test.py | 175 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 175 insertions(+) create mode 100644 tests/synonyms_test.py diff --git a/tests/synonyms_test.py b/tests/synonyms_test.py new file mode 100644 index 0000000..2bedb27 --- /dev/null +++ b/tests/synonyms_test.py @@ -0,0 +1,175 @@ +"""Tests for the Synonyms class.""" + +from __future__ import annotations + +import requests_mock + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from typesense.api_call import ApiCall +from typesense.collections import Collections +from typesense.synonyms import Synonyms, SynonymSchema, SynonymsRetrieveSchema + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Synonyms object is initialized correctly.""" + synonyms = Synonyms(fake_api_call, "companies") + + assert_match_object(synonyms.api_call, fake_api_call) + assert_object_lists_match(synonyms.api_call.nodes, fake_api_call.nodes) + assert_match_object( + synonyms.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + + assert not synonyms.synonyms + + +def test_get_missing_synonym(fake_synonyms: Synonyms) -> None: + """Test that the Synonyms object can get a missing synonym.""" + synonym = fake_synonyms["company_synonym"] + + assert synonym.synonym_id == "company_synonym" + assert_match_object(synonym.api_call, fake_synonyms.api_call) + assert_object_lists_match(synonym.api_call.nodes, fake_synonyms.api_call.nodes) + assert_match_object( + synonym.api_call.config.nearest_node, + fake_synonyms.api_call.config.nearest_node, + ) + assert synonym.collection_name == "companies" + assert ( + synonym._endpoint_path() # noqa: WPS437 + == "/collections/companies/synonyms/company_synonym" + ) + + +def test_get_existing_synonym(fake_synonyms: Synonyms) -> None: + """Test that the Synonyms object can get an existing synonym.""" + synonym = fake_synonyms["companies"] + fetched_synonym = 
fake_synonyms["companies"] + + assert len(fake_synonyms.synonyms) == 1 + + assert synonym is fetched_synonym + + +def test_retrieve(fake_synonyms: Synonyms) -> None: + """Test that the Synonyms object can retrieve synonyms.""" + json_response: SynonymsRetrieveSchema = { + "synonyms": [ + { + "id": "company_synonym", + "synonyms": ["companies", "corporations", "firms"], + }, + ], + } + + with requests_mock.Mocker() as mock: + mock.get( + "http://nearest:8108/collections/companies/synonyms/", + json=json_response, + ) + + response = fake_synonyms.retrieve() + + assert len(response) == 1 + assert response["synonyms"][0] == { + "id": "company_synonym", + "synonyms": ["companies", "corporations", "firms"], + } + assert response == json_response + + +def test_create(fake_synonyms: Synonyms) -> None: + """Test that the Synonyms object can create a synonym.""" + json_response: SynonymSchema = { + "id": "company_synonym", + "synonyms": ["companies", "corporations", "firms"], + } + + with requests_mock.Mocker() as mock: + mock.put( + "http://nearest:8108/collections/companies/synonyms/company_synonym", + json=json_response, + ) + + fake_synonyms.upsert( + "company_synonym", + {"synonyms": ["companies", "corporations", "firms"]}, + ) + + assert mock.call_count == 1 + assert mock.called is True + assert mock.last_request.method == "PUT" + assert ( + mock.last_request.url + == "http://nearest:8108/collections/companies/synonyms/company_synonym" + ) + assert mock.last_request.json() == { + "synonyms": ["companies", "corporations", "firms"], + } + + +def test_actual_create( + actual_synonyms: Synonyms, + delete_all: None, + create_collection: None, +) -> None: + """Test that the Synonyms object can create an synonym on Typesense Server.""" + response = actual_synonyms.upsert( + "company_synonym", + {"synonyms": ["companies", "corporations", "firms"]}, + 
) + + assert response == { + "id": "company_synonym", + "synonyms": ["companies", "corporations", "firms"], + } + + +def test_actual_update( + actual_synonyms: Synonyms, + delete_all: None, + create_collection: None, +) -> None: + """Test that the Synonyms object can update an synonym on Typesense Server.""" + create_response = actual_synonyms.upsert( + "company_synonym", + {"synonyms": ["companies", "corporations", "firms"]}, + ) + + assert create_response == { + "id": "company_synonym", + "synonyms": ["companies", "corporations", "firms"], + } + + update_response = actual_synonyms.upsert( + "company_synonym", + {"synonyms": ["companies", "corporations"]}, + ) + + assert update_response == { + "id": "company_synonym", + "synonyms": ["companies", "corporations"], + } + + +def test_actual_retrieve( + delete_all: None, + create_synonym: None, + actual_collections: Collections, +) -> None: + """Test that the Synonyms object can retrieve an synonym from Typesense Server.""" + response = actual_collections["companies"].synonyms.retrieve() + + assert len(response["synonyms"]) == 1 + assert_to_contain_object( + response["synonyms"][0], + { + "id": "company_synonym", + "synonyms": ["companies", "corporations", "firms"], + }, + ) From fa78b16641137659cea3bdf204441a8112e3662a Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 5 Aug 2024 12:10:51 +0300 Subject: [PATCH 087/288] feat(synonym): add type hints for synonym class - Update the `Synonym` class to include type annotations for improved code clarity and type checking. 
- Adjust methods to explicitly specify return types --- src/typesense/synonym.py | 31 ++++++++++++++++++++++--------- 1 file changed, 22 insertions(+), 9 deletions(-) diff --git a/src/typesense/synonym.py b/src/typesense/synonym.py index 27e2e08..6a6e1d2 100644 --- a/src/typesense/synonym.py +++ b/src/typesense/synonym.py @@ -1,17 +1,30 @@ +from typesense.api_call import ApiCall +from typesense.types.synonym import SynonymDeleteSchema, SynonymSchema + + class Synonym(object): - def __init__(self, api_call, collection_name, synonym_id): + def __init__( + self, api_call: ApiCall, collection_name: str, synonym_id: str + ) -> None: self.api_call = api_call self.collection_name = collection_name self.synonym_id = synonym_id - def _endpoint_path(self): - from .synonyms import Synonyms + def _endpoint_path(self) -> str: from .collections import Collections - return u"{0}/{1}/{2}/{3}".format(Collections.RESOURCE_PATH, self.collection_name, Synonyms.RESOURCE_PATH, - self.synonym_id) + from .synonyms import Synonyms + + return "{0}/{1}/{2}/{3}".format( + Collections.RESOURCE_PATH, + self.collection_name, + Synonyms.RESOURCE_PATH, + self.synonym_id, + ) - def retrieve(self): - return self.api_call.get(self._endpoint_path()) + def retrieve(self) -> SynonymSchema: + return self.api_call.get(self._endpoint_path(), entity_type=SynonymSchema) - def delete(self): - return self.api_call.delete(self._endpoint_path()) + def delete(self) -> SynonymDeleteSchema: + return self.api_call.delete( + self._endpoint_path(), entity_type=SynonymDeleteSchema + ) From 0fc33c0ce5ce9728d9c08a96abd866b6c612f10a Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 5 Aug 2024 12:15:03 +0300 Subject: [PATCH 088/288] test(synonym): add tests for synonym class - Implement tests for initialization, retrieval, and deletion of the `Synonym` class. - Add mock responses and assertions to verify correct behavior for these operations, including both unit and integration tests with the Typesense server. 
--- tests/synonym_test.py | 110 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 110 insertions(+) create mode 100644 tests/synonym_test.py diff --git a/tests/synonym_test.py b/tests/synonym_test.py new file mode 100644 index 0000000..28a3e13 --- /dev/null +++ b/tests/synonym_test.py @@ -0,0 +1,110 @@ +"""Tests for the Synonym class.""" + +from __future__ import annotations + +import requests_mock + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from typesense.api_call import ApiCall +from typesense.collections import Collections +from typesense.synonym import Synonym, SynonymDeleteSchema +from typesense.synonyms import SynonymSchema + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Synonym object is initialized correctly.""" + synonym = Synonym(fake_api_call, "companies", "company_synonym") + + assert synonym.collection_name == "companies" + assert synonym.synonym_id == "company_synonym" + assert_match_object(synonym.api_call, fake_api_call) + assert_object_lists_match(synonym.api_call.nodes, fake_api_call.nodes) + assert_match_object( + synonym.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert ( + synonym._endpoint_path() # noqa: WPS437 + == "/collections/companies/synonyms/company_synonym" + ) + + +def test_retrieve(fake_synonym: Synonym) -> None: + """Test that the Synonym object can retrieve an synonym.""" + json_response: SynonymSchema = { + "id": "company_synonym", + "synonyms": ["companies", "corporations", "firms"], + } + + with requests_mock.Mocker() as mock: + mock.get( + "/collections/companies/synonyms/company_synonym", + json=json_response, + ) + + response = fake_synonym.retrieve() + + assert len(mock.request_history) == 1 + assert mock.request_history[0].method == "GET" + assert ( + mock.request_history[0].url + == 
"http://nearest:8108/collections/companies/synonyms/company_synonym" + ) + assert response == json_response + + +def test_delete(fake_synonym: Synonym) -> None: + """Test that the Synonym object can delete an synonym.""" + json_response: SynonymDeleteSchema = { + "id": "company_synonym", + } + with requests_mock.Mocker() as mock: + mock.delete( + "/collections/companies/synonyms/company_synonym", + json=json_response, + ) + + response = fake_synonym.delete() + + assert len(mock.request_history) == 1 + assert mock.request_history[0].method == "DELETE" + assert ( + mock.request_history[0].url + == "http://nearest:8108/collections/companies/synonyms/company_synonym" + ) + assert response == {"id": "company_synonym"} + + +def test_actual_retrieve( + actual_collections: Collections, + delete_all: None, + create_synonym: None, +) -> None: + """Test that the Synonym object can retrieve an synonym from Typesense Server.""" + response = actual_collections["companies"].synonyms["company_synonym"].retrieve() + + assert response["id"] == "company_synonym" + + assert response["synonyms"] == ["companies", "corporations", "firms"] + assert_to_contain_object( + response, + { + "id": "company_synonym", + "synonyms": ["companies", "corporations", "firms"], + }, + ) + + +def test_actual_delete( + actual_collections: Collections, + delete_all: None, + create_synonym: None, +) -> None: + """Test that the Synonym object can delete an synonym from Typesense Server.""" + response = actual_collections["companies"].synonyms["company_synonym"].delete() + + assert response == {"id": "company_synonym"} From a1de39d9115e2ec507be9225c5ed57d78096cfe5 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 6 Aug 2024 11:00:19 +0300 Subject: [PATCH 089/288] feat(alias): add types for alias - AliasCreateSchema for create request - AliasSchema for response - AliasesResponseSchema for retrieve response --- 
src/typesense/types/alias.py | 43 ++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 src/typesense/types/alias.py diff --git a/src/typesense/types/alias.py b/src/typesense/types/alias.py new file mode 100644 index 0000000..ac37428 --- /dev/null +++ b/src/typesense/types/alias.py @@ -0,0 +1,43 @@ +"""Alias types for Typesense Python Client.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class AliasCreateSchema(typing.TypedDict): + """ + The schema for the request of the Aliases.create method. + + Attributes: + collection_name (str): The name of the collection. + """ + + collection_name: str + + +class AliasSchema(AliasCreateSchema): + """ + The schema for the response of the Aliases.create method. + + Attributes: + name (str): The name of the alias. + + collection_name (str): The name of the collection. + """ + + name: str + + +class AliasesResponseSchema(typing.TypedDict): + """ + The schema for the response of the Aliases.retrieve method. + + Attributes: + aliases(list[CollectionAliasSchema]): The list of aliases. 
+ """ + + aliases: typing.List[AliasSchema] From 952e4c4bb7a0d2c2f61496c65841b99047fa52a7 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 6 Aug 2024 11:01:26 +0300 Subject: [PATCH 090/288] feat(test-utils): add fixtures for aliases Extend test setup with new fixtures: - create_another_collection for secondary test collection, for alias updating - create_alias for alias creation - Fixtures for Aliases and Alias objects in actual and fake contexts --- tests/conftest.py | 77 ++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 76 insertions(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 467209b..697f6c4 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,6 +3,8 @@ import pytest import requests +from typesense.alias import Alias +from typesense.aliases import Aliases from typesense.api_call import ApiCall from typesense.collection import Collection from typesense.collections import Collections @@ -58,6 +60,50 @@ def create_collection_fixture() -> None: response.raise_for_status() +@pytest.fixture(scope="function", name="delete_all_aliases") +def clear_typesense_aliases() -> None: + """Remove all aliases from the Typesense server.""" + url = "http://localhost:8108/aliases" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + # Get the list of collections + response = requests.get(url, headers=headers) + response.raise_for_status() + + aliases = response.json() + + # Delete each alias + for alias in aliases["aliases"]: + alias_name = alias.get("name") + delete_url = f"{url}/{alias_name}" + delete_response = requests.delete(delete_url, headers=headers) + delete_response.raise_for_status() + + +@pytest.fixture(scope="function", name="create_another_collection") +def create_another_collection_fixture() -> None: + """Create a collection in the Typesense server.""" + url = "http://localhost:8108/collections" + headers = 
{"X-TYPESENSE-API-KEY": "xyz"} + data = { + "name": "companies_2", + "fields": [ + { + "name": "company_name", + "type": "string", + }, + { + "name": "num_employees", + "type": "int32", + }, + ], + "default_sorting_field": "num_employees", + } + + response = requests.post(url, headers=headers, json=data) + response.raise_for_status() + + @pytest.fixture(scope="function", name="create_override") def create_override_fixture(create_collection: None) -> None: url = "http://localhost:8108/collections/companies/overrides/company_override" @@ -83,6 +129,18 @@ def create_synonym_fixture(create_collection: None) -> None: response.raise_for_status() +@pytest.fixture(scope="function", name="create_alias") +def create_alias_fixture(create_collection: None) -> None: + url = "http://localhost:8108/aliases/company_alias" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + data = { + "collection_name": "companies", + } + + response = requests.put(url, headers=headers, json=data) + response.raise_for_status() + + @pytest.fixture(scope="function", name="actual_config") def actual_config_fixture() -> Configuration: return Configuration( @@ -119,6 +177,11 @@ def actual_synonyms_fixture(actual_api_call: ApiCall) -> Synonyms: return Synonyms(actual_api_call, "companies") +@pytest.fixture(scope="function", name="actual_aliases") +def actual_aliases_fixture(actual_api_call: ApiCall) -> Aliases: + return Aliases(actual_api_call) + + @pytest.fixture(scope="function", name="fake_config") def fake_config_fixture() -> Configuration: """Return a Configuration object with test values.""" @@ -195,6 +258,18 @@ def fake_synonyms_fixture(fake_api_call: ApiCall) -> Synonyms: @pytest.fixture(scope="function", name="fake_synonym") -def fake_override_synonym(fake_api_call: ApiCall) -> Synonym: +def fake_synonym_fixture(fake_api_call: ApiCall) -> Synonym: """Return a Collection object with test values.""" return 
Synonym(fake_api_call, "companies", "company_synonym") + + +@pytest.fixture(scope="function", name="fake_aliases") +def fake_aliases_fixture(fake_api_call: ApiCall) -> Aliases: + """Return a Collection object with test values.""" + return Aliases(fake_api_call) + + +@pytest.fixture(scope="function", name="fake_alias") +def fake_alias_fixture(fake_api_call: ApiCall) -> Alias: + """Return a Collection object with test values.""" + return Alias(fake_api_call, "company_alias") From 27698cfc6e51d7799d012b5d3dd615558fcea5c7 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 6 Aug 2024 11:05:18 +0300 Subject: [PATCH 091/288] feat(aliases): add type hints to aliases class Enhance `Aliases` class with type hints and use new `AliasSchema` types: - Add type annotations to methods and attributes - Update upsert and retrieve methods to use new schema types --- src/typesense/aliases.py | 41 ++++++++++++++++++++++++++-------------- 1 file changed, 27 insertions(+), 14 deletions(-) diff --git a/src/typesense/aliases.py b/src/typesense/aliases.py index 2545eb8..c1a2926 100644 --- a/src/typesense/aliases.py +++ b/src/typesense/aliases.py @@ -1,24 +1,37 @@ from typesense.alias import Alias +from typesense.api_call import ApiCall +from typesense.types.alias import AliasCreateSchema, AliasesResponseSchema, AliasSchema -class Aliases(object): - RESOURCE_PATH = '/aliases' +class Aliases: + RESOURCE_PATH = "/aliases" - def __init__(self, api_call): + def __init__(self, api_call: ApiCall): self.api_call = api_call - self.aliases = {} + self.aliases: dict[str, Alias] = {} - def __getitem__(self, name): - if name not in self.aliases: + def __getitem__(self, name: str) -> Alias: + if not self.aliases.get(name): self.aliases[name] = Alias(self.api_call, name) return self.aliases.get(name) - def _endpoint_path(self, alias_name): - return u"{0}/{1}".format(Aliases.RESOURCE_PATH, alias_name) - - def upsert(self, name, mapping): - return self.api_call.put(self._endpoint_path(name), 
mapping) - - def retrieve(self): - return self.api_call.get(Aliases.RESOURCE_PATH) + def _endpoint_path(self, alias_name: str) -> str: + return "{0}/{1}".format(Aliases.RESOURCE_PATH, alias_name) + + def upsert(self, name: str, mapping: AliasCreateSchema) -> AliasSchema: + response: AliasSchema = self.api_call.put( + self._endpoint_path(name), + body=mapping, + entity_type=AliasSchema, + ) + + return response + + def retrieve(self) -> AliasesResponseSchema: + response: AliasesResponseSchema = self.api_call.get( + Aliases.RESOURCE_PATH, + as_json=True, + entity_type=AliasesResponseSchema, + ) + return response From c499fb76bd9c0eb8031a9aa1f7a61fa1885e0d2c Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 6 Aug 2024 11:06:38 +0300 Subject: [PATCH 092/288] test(aliases): add tests for aliases class Implement test suite for Aliases functionality: - Cover initialization, retrieval, creation, and updating of aliases - Include both mock and actual server tests - Utilize new type annotations and schemas --- tests/aliases_test.py | 156 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 156 insertions(+) create mode 100644 tests/aliases_test.py diff --git a/tests/aliases_test.py b/tests/aliases_test.py new file mode 100644 index 0000000..3868bcf --- /dev/null +++ b/tests/aliases_test.py @@ -0,0 +1,156 @@ +"""Tests for the Aliases class.""" + +from __future__ import annotations + +import requests_mock + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from typesense.aliases import Aliases +from typesense.api_call import ApiCall +from typesense.types.alias import AliasesResponseSchema, AliasSchema + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Aliases object is initialized correctly.""" + aliases = Aliases(fake_api_call) + + assert_match_object(aliases.api_call, fake_api_call) + assert_object_lists_match(aliases.api_call.nodes, fake_api_call.nodes) + 
assert_match_object( + aliases.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + + assert not aliases.aliases + + +def test_get_missing_alias(fake_aliases: Aliases) -> None: + """Test that the Aliases object can get a missing alias.""" + alias = fake_aliases["company_alias"] + + assert alias.name == "company_alias" + assert_match_object(alias.api_call, fake_aliases.api_call) + assert_object_lists_match(alias.api_call.nodes, fake_aliases.api_call.nodes) + assert_match_object( + alias.api_call.config.nearest_node, + fake_aliases.api_call.config.nearest_node, + ) + assert alias._endpoint_path == "/aliases/company_alias" # noqa: WPS437 + + +def test_get_existing_alias(fake_aliases: Aliases) -> None: + """Test that the Aliases object can get an existing alias.""" + alias = fake_aliases["companies"] + fetched_alias = fake_aliases["companies"] + + assert len(fake_aliases.aliases) == 1 + + assert alias is fetched_alias + + +def test_retrieve(fake_aliases: Aliases) -> None: + """Test that the Aliases object can retrieve aliases.""" + json_response: AliasesResponseSchema = { + "aliases": [ + { + "collection_name": "companies", + "name": "company_alias", + }, + ], + } + + with requests_mock.Mocker() as mock: + mock.get( + "http://nearest:8108/aliases", + json=json_response, + ) + + response = fake_aliases.retrieve() + + assert len(response) == 1 + assert response["aliases"][0] == { + "collection_name": "companies", + "name": "company_alias", + } + assert response == json_response + + +def test_create(fake_aliases: Aliases) -> None: + """Test that the Aliases object can create a alias.""" + json_response: AliasSchema = { + "collection_name": "companies", + "name": "company_alias", + } + + with requests_mock.Mocker() as mock: + mock.put( + "http://nearest:8108/aliases/company_alias", + json=json_response, + ) + + fake_aliases.upsert( + "company_alias", + 
{"collection_name": "companies", "name": "company_alias"}, + ) + + assert mock.call_count == 1 + assert mock.called is True + assert mock.last_request.method == "PUT" + assert mock.last_request.url == "http://nearest:8108/aliases/company_alias" + assert mock.last_request.json() == json_response + + +def test_actual_create(actual_aliases: Aliases, delete_all_aliases: None) -> None: + """Test that the Aliases object can create an alias on Typesense Server.""" + response = actual_aliases.upsert("company_alias", {"collection_name": "companies"}) + + assert response == {"collection_name": "companies", "name": "company_alias"} + + +def test_actual_update( + actual_aliases: Aliases, + delete_all_aliases: None, + delete_all: None, + create_collection: None, + create_another_collection: None, +) -> None: + """Test that the Aliases object can update an alias on Typesense Server.""" + create_response = actual_aliases.upsert( + "company_alias", + {"collection_name": "companies"}, + ) + + assert create_response == {"collection_name": "companies", "name": "company_alias"} + + update_response = actual_aliases.upsert( + "company_alias", + {"collection_name": "companies_2"}, + ) + + assert update_response == { + "collection_name": "companies_2", + "name": "company_alias", + } + + +def test_actual_retrieve( + delete_all: None, + delete_all_aliases: None, + create_alias: None, + actual_aliases: Aliases, +) -> None: + """Test that the Aliases object can retrieve an alias from Typesense Server.""" + response = actual_aliases.retrieve() + + assert len(response["aliases"]) == 1 + assert_to_contain_object( + response["aliases"][0], + { + "collection_name": "companies", + "name": "company_alias", + }, + ) From bae14dbc306e1c4be92d04593928902c248e62c0 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 6 Aug 2024 11:07:32 +0300 Subject: [PATCH 093/288] feat(alias): add type hints to alias class Enhance `Alias` class with type hints and 
improve method signatures: - Add type annotations to methods and attributes - Convert _endpoint_path to a property - Update retrieve and delete methods to use `AliasSchema` --- src/typesense/alias.py | 25 ++++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/src/typesense/alias.py b/src/typesense/alias.py index 5695937..c140434 100644 --- a/src/typesense/alias.py +++ b/src/typesense/alias.py @@ -1,14 +1,25 @@ +from typesense.api_call import ApiCall +from typesense.types.alias import AliasSchema + + class Alias(object): - def __init__(self, api_call, name): + def __init__(self, api_call: ApiCall, name: str): self.api_call = api_call self.name = name - def _endpoint_path(self): + @property + def _endpoint_path(self) -> str: from .aliases import Aliases - return u"{0}/{1}".format(Aliases.RESOURCE_PATH, self.name) - def retrieve(self): - return self.api_call.get(self._endpoint_path()) + return "{0}/{1}".format(Aliases.RESOURCE_PATH, self.name) + + def retrieve(self) -> AliasSchema: + response: AliasSchema = self.api_call.get( + self._endpoint_path, entity_type=AliasSchema, as_json=True + ) + return response + + def delete(self) -> AliasSchema: + response = self.api_call.delete(self._endpoint_path, entity_type=AliasSchema) - def delete(self): - return self.api_call.delete(self._endpoint_path()) + return response From f77498c7289e552c4348cc33230d16cd18420883 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 6 Aug 2024 11:08:43 +0300 Subject: [PATCH 094/288] test(alias): add tests for alias class Implement test suite for `Alias` class: - Cover initialization, retrieval, deletion of aliases - Include both mock and actual server tests - Utilize new type annotations and schemas --- tests/alias_test.py | 110 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 110 insertions(+) create mode 100644 tests/alias_test.py diff --git a/tests/alias_test.py b/tests/alias_test.py new file mode 100644 index 0000000..e1b514c --- 
/dev/null +++ b/tests/alias_test.py @@ -0,0 +1,110 @@ +"""Tests for the Alias class.""" + +from __future__ import annotations + +import requests_mock + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from typesense.alias import Alias +from typesense.aliases import Aliases +from typesense.api_call import ApiCall +from typesense.types.alias import AliasSchema + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Alias object is initialized correctly.""" + alias = Alias(fake_api_call, "company_alias") + + assert alias.name == "company_alias" + assert_match_object(alias.api_call, fake_api_call) + assert_object_lists_match(alias.api_call.nodes, fake_api_call.nodes) + assert_match_object( + alias.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert alias._endpoint_path == "/aliases/company_alias" # noqa: WPS437 + + +def test_retrieve(fake_alias: Alias) -> None: + """Test that the Alias object can retrieve an alias.""" + json_response: AliasSchema = { + "collection_name": "companies", + "name": "company_alias", + } + + with requests_mock.Mocker() as mock: + mock.get( + "/aliases/company_alias", + json=json_response, + ) + + response = fake_alias.retrieve() + + assert len(mock.request_history) == 1 + assert mock.request_history[0].method == "GET" + assert ( + mock.request_history[0].url == "http://nearest:8108/aliases/company_alias" + ) + assert response == json_response + + +def test_delete(fake_alias: Alias) -> None: + """Test that the Alias object can delete an alias.""" + json_response: AliasSchema = { + "collection_name": "companies", + "name": "company_alias", + } + with requests_mock.Mocker() as mock: + mock.delete( + "/aliases/company_alias", + json=json_response, + ) + + response = fake_alias.delete() + + assert len(mock.request_history) == 1 + assert mock.request_history[0].method == "DELETE" + 
assert ( + mock.request_history[0].url == "http://nearest:8108/aliases/company_alias" + ) + assert response == json_response + + +def test_actual_retrieve( + actual_aliases: Aliases, + delete_all_aliases: None, + delete_all: None, + create_alias: None, +) -> None: + """Test that the Alias object can retrieve an alias from Typesense Server.""" + response = actual_aliases["company_alias"].retrieve() + + assert response["collection_name"] == "companies" + assert response["name"] == "company_alias" + + assert_to_contain_object( + response, + { + "collection_name": "companies", + "name": "company_alias", + }, + ) + + +def test_actual_delete( + actual_aliases: Aliases, + delete_all_aliases: None, + delete_all: None, + create_alias: None, +) -> None: + """Test that the Alias object can delete an alias from Typesense Server.""" + response = actual_aliases["company_alias"].delete() + + assert response == { + "collection_name": "companies", + "name": "company_alias", + } From ddd2c451c0edcc50ff898fde01af4cff07068f9a Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 14:29:03 +0300 Subject: [PATCH 095/288] feat(analytics_rule): add types for analytics rules Introduce type definitions for analytics rule types for the Python Client: - Create Event, Source, Destination, and RuleParams types - Add separate schemas for queries and counters - Include typed dictionaries for create, retrieve, and delete operations --- src/typesense/types/analytics_rule.py | 203 ++++++++++++++++++++++++++ 1 file changed, 203 insertions(+) create mode 100644 src/typesense/types/analytics_rule.py diff --git a/src/typesense/types/analytics_rule.py b/src/typesense/types/analytics_rule.py new file mode 100644 index 0000000..af261bc --- /dev/null +++ b/src/typesense/types/analytics_rule.py @@ -0,0 +1,203 @@ +"""Analytics Rule types for Typesense Python Client.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import 
typing_extensions as typing + + +class Event(typing.TypedDict): + """ + Schema for analytics rule [events](https://typesense.org/docs/26.0/api/analytics-query-suggestions.html#analytics-query-suggestions). + + Attributes: + type (str): The [type](https://typesense.org/docs/26.0/api/analytics-query-suggestions.html#aggregating-multiple-events) of the event. + + - `click`: Tracking clicks against documents returned in search response.. + - `conversion`: The event is a click. + - `visit`: Tracking page visits to specific documents, useful for recommendations. + + weight (int): The weight of the event. + + name (str): The name of the event. + + """ + + type: typing.Literal["click", "conversion", "visit"] + weight: int + name: str + + +class _Source(typing.TypedDict): + """ + Schema for the source of the analytics rule. + + Attributes: + collections (list[str]): The list of collections. + + events (list[Event]): The list of events. + """ + + collections: typing.List[str] + events: typing.NotRequired[typing.List[Event]] + + +class _SourceForCounters(typing.TypedDict): + """ + Schema for the source of the analytics rule for counter rules. + + Attributes: + collections (list[str]): The list of collections. + + events (list[Event]): The list of events. + """ + + collections: typing.List[str] + events: typing.List[Event] + + +class _Destination(typing.TypedDict): + """ + Schema for the destination of the analytics rule. + + Attributes: + collection (str): The destination collection. + + counter_field (str): The counter field of the collection. + """ + + collection: str + counter_field: typing.NotRequired[str] + + +class _DestinationForCounters(typing.TypedDict): + """ + Schema for the destination of the analytics rule for counter rules. + + Attributes: + collection (str): The destination collection. + + counter_field (str): The counter field of the collection. 
+ """ + + collection: str + counter_field: str + + +class _RuleParams(typing.TypedDict): + """ + Schema for the analytics rule parameters. + + Attributes: + source (_Source): The source of the analytics rule. + + expand_query (bool): Whether to expand the query. + + destination (_Destination): The destination of the analytics rule. + + limit (int): The limit of the analytics rule. + """ + + source: _Source + expand_query: typing.NotRequired[bool] + destination: _Destination + limit: typing.NotRequired[int] + + +class _RuleParamsForCounters(typing.TypedDict): + """ + Schema for the analytics rule parameters for counter rules. + + Attributes: + source (_SourceForCounters): The source of the analytics rule. + + destination (_DestinationForCounters): The destination of the analytics rule. + + limit (int): The limit of the analytics + """ + + source: _SourceForCounters + destination: _DestinationForCounters + limit: typing.NotRequired[int] + + +class RuleCreateSchemaForQueries(typing.TypedDict): + """ + Schema for the request of the AnalyticsRules.create method. + + Attributes: + type (str): The type of the analytics rule. + + params (AnalyticsRuleParams): The params of the analytics rule. + """ + + type: typing.Literal["popular_queries", "nohits_queries"] + params: _RuleParams + + +class RuleCreateSchemaForCounters(typing.TypedDict): + """ + Schema for the request of the AnalyticsRules.create method. + + Attributes: + type (str): The type of the analytics rule. + + params (AnalyticsRuleParams): The params of the analytics rule. + """ + + type: typing.Literal["counter"] + params: _RuleParamsForCounters + + +class RuleSchemaForQueries(RuleCreateSchemaForQueries): + """ + Schema for the response of the AnalyticsRules.create method. + + Attributes: + name (str): The name of the analytics rule. + + type (str): The type of the analytics rule. + + params (AnalyticsRuleParams): The params of the analytics rule. 
+ """ + + name: str + + +class RuleSchemaForCounters(RuleCreateSchemaForCounters): + """ + Schema for the response of the AnalyticsRules.create method. + + Attributes: + name (str): The name of the analytics rule. + + type (str): The type of the analytics rule. + + params (AnalyticsRuleParams): The params of the analytics rule. + """ + + name: str + + +class RuleDeleteSchema(typing.TypedDict): + """ + Schema for the response of the AnalyticsRules.delete method. + + Attributes: + name (str): The name of the analytics rule. + """ + + name: str + + +class RulesRetrieveSchema(typing.TypedDict): + """ + Schema for the response of the AnalyticsRules.retrieve method. + + Attributes: + rules(typing.List[AnalyticsRuleSchema]): The list of analytics rules. + """ + + rules: typing.List[typing.Union[RuleSchemaForQueries, RuleSchemaForCounters]] From 1e7d836535c6134cfa5623039db6dd01fbc7b784 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 14:33:33 +0300 Subject: [PATCH 096/288] feat(test-utils): add fixtures for analytics rules Introduce new pytest fixtures for analytics rules in `conftest.py`: - Add clear_typesense_analytics_rules fixture - Create fixtures for query collection and analytics rule - Implement fake and actual analytics rules fixtures --- tests/conftest.py | 87 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 87 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index 697f6c4..420efff 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,6 +5,8 @@ from typesense.alias import Alias from typesense.aliases import Aliases +from typesense.analytics_rule import AnalyticsRule +from typesense.analytics_rules import AnalyticsRules from typesense.api_call import ApiCall from typesense.collection import Collection from typesense.collections import Collections @@ -77,6 +79,23 @@ def clear_typesense_aliases() -> None: alias_name = alias.get("name") delete_url = f"{url}/{alias_name}" delete_response = 
requests.delete(delete_url, headers=headers) + +@pytest.fixture(scope="function", name="delete_all_analytics_rules") +def clear_typesense_analytics_rules() -> None: + """Remove all analytics_rules from the Typesense server.""" + url = "http://localhost:8108/analytics/rules" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + # Get the list of collections + response = requests.get(url, headers=headers, timeout=3) + response.raise_for_status() + analytics_rules = response.json() + + # Delete each analytics_rule + for analytics_rule_set in analytics_rules["rules"]: + analytics_rule_id = analytics_rule_set.get("name") + delete_url = f"{url}/{analytics_rule_id}" + delete_response = requests.delete(delete_url, headers=headers, timeout=3) delete_response.raise_for_status() @@ -101,6 +120,55 @@ def create_another_collection_fixture() -> None: } response = requests.post(url, headers=headers, json=data) + +@pytest.fixture(scope="function", name="create_query_collection") +def create_query_collection_fixture() -> None: + """Create a collection in the Typesense server.""" + url = "http://localhost:8108/collections" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + query_collection_data = { + "name": "companies_queries", + "fields": [ + { + "name": "q", + "type": "string", + }, + { + "name": "count", + "type": "int32", + }, + ], + } + + response = requests.post( + url, + headers=headers, + json=query_collection_data, + timeout=3, + ) + response.raise_for_status() + + +@pytest.fixture(scope="function", name="create_analytics_rule") +def create_analytics_rule_fixture( + create_collection: None, + create_query_collection: None, +) -> None: + """Create a collection in the Typesense server.""" + url = "http://localhost:8108/analytics/rules" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + analytics_rule_data = { + "name": "company_analytics_rule", + "type": 
"nohits_queries", + "params": { + "source": { + "collections": ["companies"], + }, + "destination": {"collection": "companies_queries"}, + }, + } + + response = requests.post(url, headers=headers, json=analytics_rule_data, timeout=3) response.raise_for_status() @@ -233,6 +301,12 @@ def fake_collections_fixture(fake_api_call: ApiCall) -> Collections: return Collections(fake_api_call) +@pytest.fixture(scope="function", name="fake_analytics_rules") +def fake_analytics_rules_fixture(fake_api_call: ApiCall) -> AnalyticsRules: + """Return a AnalyticsRule object with test values.""" + return AnalyticsRules(fake_api_call) + + @pytest.fixture(scope="function", name="fake_collection") def fake_collection_fixture(fake_api_call: ApiCall) -> Collection: """Return a Collection object with test values.""" @@ -273,3 +347,16 @@ def fake_aliases_fixture(fake_api_call: ApiCall) -> Aliases: def fake_alias_fixture(fake_api_call: ApiCall) -> Alias: """Return a Collection object with test values.""" return Alias(fake_api_call, "company_alias") +@pytest.fixture(scope="function", name="actual_analytics_rules") +def actual_analytics_rules_fixture(actual_api_call: ApiCall) -> AnalyticsRules: + """Return a AnalyticsRules object using a real API.""" + return AnalyticsRules(actual_api_call) + + + return Keys(actual_api_call) +@pytest.fixture(scope="function", name="fake_analytics_rule") +def fake_analytics_rule_fixture(fake_api_call: ApiCall) -> AnalyticsRule: + """Return a Collection object with test values.""" + return AnalyticsRule(fake_api_call, "company_analytics_rule") + + From df1de4061a4e1aa4d759ec7bab361a3fa336b002 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 14:41:35 +0300 Subject: [PATCH 097/288] feat(analytics_rules): add type hints to analytics rules class Improve the `AnalyticsRules` class with proper type hints: - Add type annotations to methods and parameters - Update create, upsert, and retrieve methods with specific return types - Refactor 
__getitem__ for better type safety --- src/typesense/analytics_rules.py | 72 ++++++++++++++++++++++++++------ 1 file changed, 59 insertions(+), 13 deletions(-) diff --git a/src/typesense/analytics_rules.py b/src/typesense/analytics_rules.py index 60a747d..5874320 100644 --- a/src/typesense/analytics_rules.py +++ b/src/typesense/analytics_rules.py @@ -1,26 +1,72 @@ +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from typesense.api_call import ApiCall +from typesense.types.analytics_rule import ( + RuleCreateSchemaForQueries, + RuleSchemaForCounters, + RuleSchemaForQueries, + RulesRetrieveSchema, +) + from .analytics_rule import AnalyticsRule class AnalyticsRules(object): - RESOURCE_PATH = '/analytics/rules' + RESOURCE_PATH = "/analytics/rules" - def __init__(self, api_call): + def __init__(self, api_call: ApiCall): self.api_call = api_call - self.rules = {} + self.rules: typing.Dict[str, AnalyticsRule] = {} - def __getitem__(self, rule_id): - if rule_id not in self.rules: + def __getitem__(self, rule_id: str) -> AnalyticsRule: + if not self.rules.get(rule_id): self.rules[rule_id] = AnalyticsRule(self.api_call, rule_id) return self.rules[rule_id] - def create(self, rule, params=None): - params = params or {} - return self.api_call.post(AnalyticsRules.RESOURCE_PATH, rule, params) - - def upsert(self, id, rule): - return self.api_call.put(u"{0}/{1}".format(AnalyticsRules.RESOURCE_PATH, id), rule) + def create( + self, + rule: typing.Union[RuleSchemaForCounters, RuleCreateSchemaForQueries], + params: typing.Union[ + typing.Dict[str, typing.Union[str, int, bool]], None + ] = None, + ) -> typing.Union[RuleSchemaForCounters, RuleCreateSchemaForQueries]: + response: typing.Union[RuleSchemaForCounters, RuleCreateSchemaForQueries] = ( + self.api_call.post( + AnalyticsRules.RESOURCE_PATH, + body=rule, + params=params, + as_json=True, + entity_type=typing.Union[ + RuleSchemaForCounters, RuleCreateSchemaForQueries + 
], + ) + ) + return response - def retrieve(self): - return self.api_call.get(AnalyticsRules.RESOURCE_PATH) + def upsert( + self, + id: str, + rule: typing.Union[RuleCreateSchemaForQueries, RuleSchemaForCounters], + ) -> typing.Union[RuleSchemaForCounters, RuleCreateSchemaForQueries]: + response = self.api_call.put( + "{0}/{1}".format(AnalyticsRules.RESOURCE_PATH, id), + body=rule, + entity_type=typing.Union[RuleSchemaForQueries, RuleSchemaForCounters], + ) + return typing.cast( + typing.Union[RuleSchemaForCounters, RuleCreateSchemaForQueries], response + ) + def retrieve(self) -> RulesRetrieveSchema: + response: RulesRetrieveSchema = self.api_call.get( + AnalyticsRules.RESOURCE_PATH, + as_json=True, + entity_type=RulesRetrieveSchema, + ) + return response From 4adb8b0221da87b1b6b8a9d6c2bf2639bb95d723 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 14:43:05 +0300 Subject: [PATCH 098/288] test(analytics_rules): add tests for analytics rules class Implement comprehensive test suite for AnalyticsRules: - Cover initialization, retrieval, creation, and update operations - Include both mock and actual Typesense server tests - Ensure proper type checking and object assertions --- tests/analytics_rules_test.py | 219 ++++++++++++++++++++++++++++++++++ 1 file changed, 219 insertions(+) create mode 100644 tests/analytics_rules_test.py diff --git a/tests/analytics_rules_test.py b/tests/analytics_rules_test.py new file mode 100644 index 0000000..dd545f4 --- /dev/null +++ b/tests/analytics_rules_test.py @@ -0,0 +1,219 @@ +"""Tests for the AnalyticsRules class.""" + +from __future__ import annotations + +import requests_mock + +from tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from typesense.analytics_rules import AnalyticsRules +from typesense.api_call import ApiCall +from typesense.types.analytics_rule import ( + RuleCreateSchemaForQueries, + RulesRetrieveSchema, +) + + +def test_init(fake_api_call: ApiCall) -> 
None: + """Test that the AnalyticsRules object is initialized correctly.""" + analytics_rules = AnalyticsRules(fake_api_call) + + assert_match_object(analytics_rules.api_call, fake_api_call) + assert_object_lists_match(analytics_rules.api_call.nodes, fake_api_call.nodes) + assert_match_object( + analytics_rules.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + + assert not analytics_rules.rules + + +def test_get_missing_analytics_rule(fake_analytics_rules: AnalyticsRules) -> None: + """Test that the AnalyticsRules object can get a missing analytics_rule.""" + analytics_rule = fake_analytics_rules["company_analytics_rule"] + + assert analytics_rule.rule_id == "company_analytics_rule" + assert_match_object(analytics_rule.api_call, fake_analytics_rules.api_call) + assert_object_lists_match( + analytics_rule.api_call.nodes, + fake_analytics_rules.api_call.nodes, + ) + assert_match_object( + analytics_rule.api_call.config.nearest_node, + fake_analytics_rules.api_call.config.nearest_node, + ) + assert ( + analytics_rule._endpoint_path # noqa: WPS437 + == "/analytics/rules/company_analytics_rule" + ) + + +def test_get_existing_analytics_rule(fake_analytics_rules: AnalyticsRules) -> None: + """Test that the AnalyticsRules object can get an existing analytics_rule.""" + analytics_rule = fake_analytics_rules["company_analytics_rule"] + fetched_analytics_rule = fake_analytics_rules["company_analytics_rule"] + + assert len(fake_analytics_rules.rules) == 1 + + assert analytics_rule is fetched_analytics_rule + + +def test_retrieve(fake_analytics_rules: AnalyticsRules) -> None: + """Test that the AnalyticsRules object can retrieve analytics_rules.""" + json_response: RulesRetrieveSchema = { + "rules": [ + { + "name": "company_analytics_rule", + "params": { + "destination": { + "collection": "companies_queries", + }, + "source": {"collections": ["companies"]}, + }, + "type": "nohits_queries", + }, + ], + } + + with requests_mock.Mocker() as mock: + mock.get( 
+ "http://nearest:8108/analytics/rules", + json=json_response, + ) + + response = fake_analytics_rules.retrieve() + + assert len(response) == 1 + assert response["rules"][0] == json_response.get("rules")[0] + assert response == json_response + + +def test_create(fake_analytics_rules: AnalyticsRules) -> None: + """Test that the AnalyticsRules object can create a analytics_rule.""" + json_response: RuleCreateSchemaForQueries = { + "name": "company_analytics_rule", + "params": { + "destination": { + "collection": "companies_queries", + }, + "source": {"collections": ["companies"]}, + }, + "type": "nohits_queries", + } + + with requests_mock.Mocker() as mock: + mock.post( + "http://nearest:8108/analytics/rules", + json=json_response, + ) + + fake_analytics_rules.create( + rule={ + "params": { + "destination": { + "collection": "companies_queries", + }, + "source": {"collections": ["companies"]}, + }, + "type": "nohits_queries", + "name": "company_analytics_rule", + }, + ) + + assert mock.call_count == 1 + assert mock.called is True + assert mock.last_request.method == "POST" + assert mock.last_request.url == "http://nearest:8108/analytics/rules" + assert mock.last_request.json() == { + "params": { + "destination": { + "collection": "companies_queries", + }, + "source": {"collections": ["companies"]}, + }, + "type": "nohits_queries", + "name": "company_analytics_rule", + } + + +def test_actual_create( + actual_analytics_rules: AnalyticsRules, + delete_all: None, + delete_all_analytics_rules: None, + create_collection: None, + create_query_collection: None, +) -> None: + """Test that the AnalyticsRules object can create an analytics_rule on Typesense Server.""" + response = actual_analytics_rules.create( + rule={ + "name": "company_analytics_rule", + "type": "nohits_queries", + "params": { + "source": { + "collections": ["companies"], + }, + 
"destination": {"collection": "companies_queries"}, + }, + }, + ) + + assert response == { + "name": "company_analytics_rule", + "type": "nohits_queries", + "params": { + "source": {"collections": ["companies"]}, + "destination": {"collection": "companies_queries"}, + }, + } + + +def test_actual_update( + actual_analytics_rules: AnalyticsRules, + delete_all: None, + delete_all_analytics_rules: None, + create_analytics_rule: None, +) -> None: + """Test that the AnalyticsRules object can update an analytics_rule on Typesense Server.""" + response = actual_analytics_rules.upsert( + "company_analytics_rule", + { + "type": "popular_queries", + "params": { + "source": { + "collections": ["companies"], + }, + "destination": {"collection": "companies_queries"}, + }, + }, + ) + + assert response == { + "name": "company_analytics_rule", + "type": "popular_queries", + "params": { + "source": {"collections": ["companies"]}, + "destination": {"collection": "companies_queries"}, + }, + } + + +def test_actual_retrieve( + actual_analytics_rules: AnalyticsRules, + delete_all: None, + delete_all_analytics_rules: None, + create_analytics_rule: None, +) -> None: + """Test that the AnalyticsRules object can retrieve the rules from Typesense Server.""" + response = actual_analytics_rules.retrieve() + assert len(response["rules"]) == 1 + assert_match_object( + response["rules"][0], + { + "name": "company_analytics_rule", + "params": { + "destination": {"collection": "companies_queries"}, + "limit": 1000, + "source": {"collections": ["companies"]}, + }, + "type": "nohits_queries", + }, + ) From 01325d98361e6df7f5e80dd7b7fcda055bcf2d38 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 14:44:52 +0300 Subject: [PATCH 099/288] feat(analytics_rule): add type hints to analytics rule class Improve the `AnalyticsRule` class with proper type hints: - Add type annotations to methods and parameters - Update delete and retrieve methods with specific return types --- 
src/typesense/analytics_rule.py | 39 +++++++++++++++++++++++++++------ 1 file changed, 32 insertions(+), 7 deletions(-) diff --git a/src/typesense/analytics_rule.py b/src/typesense/analytics_rule.py index 1b7576a..aca950e 100644 --- a/src/typesense/analytics_rule.py +++ b/src/typesense/analytics_rule.py @@ -1,14 +1,39 @@ +import typing +from urllib import response + +from typesense.api_call import ApiCall +from typesense.types.analytics_rule import ( + RuleDeleteSchema, + RuleSchemaForCounters, + RuleSchemaForQueries, +) + + class AnalyticsRule(object): - def __init__(self, api_call, rule_id): + def __init__(self, api_call: ApiCall, rule_id: str): self.api_call = api_call self.rule_id = rule_id - def _endpoint_path(self): + @property + def _endpoint_path(self) -> str: from .analytics_rules import AnalyticsRules - return u"{0}/{1}".format(AnalyticsRules.RESOURCE_PATH, self.rule_id) - def retrieve(self): - return self.api_call.get(self._endpoint_path()) + return "{0}/{1}".format(AnalyticsRules.RESOURCE_PATH, self.rule_id) + + def retrieve( + self, + ) -> typing.Union[RuleSchemaForQueries, RuleSchemaForCounters]: + response: typing.Union[RuleSchemaForQueries, RuleSchemaForCounters] = ( + self.api_call.get( + self._endpoint_path, + entity_type=typing.Union[RuleSchemaForQueries, RuleSchemaForCounters], + as_json=True, + ) + ) + return response - def delete(self): - return self.api_call.delete(self._endpoint_path()) + def delete(self) -> RuleDeleteSchema: + response: RuleDeleteSchema = self.api_call.delete( + self._endpoint_path, entity_type=RuleDeleteSchema + ) + return response From 994f274bd77106166cf981aecc93b83683e0e7f5 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 14:48:18 +0300 Subject: [PATCH 100/288] test(analytics_rule): add tests for analytics rule class - Introduce `analytics_rule_test.py` to validate the `AnalyticsRule` class. - Include tests for initializing, retrieving, and deleting analytics rules. 
- Use `requests_mock` to simulate API interactions for these operations. - Verify correct behavior for both fake and actual Typesense server scenarios. --- src/typesense/analytics_rule.py | 8 ++- tests/analytics_rule_test.py | 117 ++++++++++++++++++++++++++++++++ 2 files changed, 123 insertions(+), 2 deletions(-) create mode 100644 tests/analytics_rule_test.py diff --git a/src/typesense/analytics_rule.py b/src/typesense/analytics_rule.py index aca950e..b259609 100644 --- a/src/typesense/analytics_rule.py +++ b/src/typesense/analytics_rule.py @@ -1,5 +1,9 @@ -import typing -from urllib import response +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing from typesense.api_call import ApiCall from typesense.types.analytics_rule import ( diff --git a/tests/analytics_rule_test.py b/tests/analytics_rule_test.py new file mode 100644 index 0000000..afa8a99 --- /dev/null +++ b/tests/analytics_rule_test.py @@ -0,0 +1,117 @@ +"""Tests for the AnalyticsRule class.""" + +from __future__ import annotations + +import requests_mock + +from tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from typesense.analytics_rule import AnalyticsRule +from typesense.analytics_rules import AnalyticsRules +from typesense.api_call import ApiCall +from typesense.types.analytics_rule import RuleDeleteSchema, RuleSchemaForQueries + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the AnalyticsRule object is initialized correctly.""" + analytics_rule = AnalyticsRule(fake_api_call, "company_analytics_rule") + + assert analytics_rule.rule_id == "company_analytics_rule" + assert_match_object(analytics_rule.api_call, fake_api_call) + assert_object_lists_match(analytics_rule.api_call.nodes, fake_api_call.nodes) + assert_match_object( + analytics_rule.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert ( + analytics_rule._endpoint_path # noqa: WPS437 + == 
"/analytics/rules/company_analytics_rule" + ) + + +def test_retrieve(fake_analytics_rule: AnalyticsRule) -> None: + """Test that the AnalyticsRule object can retrieve an analytics_rule.""" + json_response: RuleSchemaForQueries = { + "name": "company_analytics_rule", + "params": { + "destination": { + "collection": "companies_queries", + }, + "source": {"collections": ["companies"]}, + }, + "type": "nohits_queries", + } + + with requests_mock.Mocker() as mock: + mock.get( + "/analytics/rules/company_analytics_rule", + json=json_response, + ) + + response = fake_analytics_rule.retrieve() + + assert len(mock.request_history) == 1 + assert mock.request_history[0].method == "GET" + assert ( + mock.request_history[0].url + == "http://nearest:8108/analytics/rules/company_analytics_rule" + ) + assert response == json_response + + +def test_delete(fake_analytics_rule: AnalyticsRule) -> None: + """Test that the AnalyticsRule object can delete an analytics_rule.""" + json_response: RuleDeleteSchema = { + "name": "company_analytics_rule", + } + with requests_mock.Mocker() as mock: + mock.delete( + "/analytics/rules/company_analytics_rule", + json=json_response, + ) + + response = fake_analytics_rule.delete() + + assert len(mock.request_history) == 1 + assert mock.request_history[0].method == "DELETE" + assert ( + mock.request_history[0].url + == "http://nearest:8108/analytics/rules/company_analytics_rule" + ) + assert response == json_response + + +def test_actual_retrieve( + actual_analytics_rules: AnalyticsRules, + delete_all: None, + delete_all_analytics_rules: None, + create_analytics_rule: None, +) -> None: + """Test that the AnalyticsRule object can retrieve a rule from Typesense Server.""" + response = actual_analytics_rules["company_analytics_rule"].retrieve() + + expected: RuleSchemaForQueries = { + "name": "company_analytics_rule", + "params": { + "destination": {"collection": 
"companies_queries"}, + "limit": 1000, + "source": {"collections": ["companies"]}, + }, + "type": "nohits_queries", + } + + assert response == expected + + +def test_actual_delete( + actual_analytics_rules: AnalyticsRules, + delete_all: None, + delete_all_analytics_rules: None, + create_analytics_rule: None, +) -> None: + """Test that the AnalyticsRule object can delete a rule from Typesense Server.""" + response = actual_analytics_rules["company_analytics_rule"].delete() + + expected: RuleDeleteSchema = { + "name": "company_analytics_rule", + } + assert response == expected From 90e0912282b30fad73b548b0a2bf1d38480aa03b Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 15:01:06 +0300 Subject: [PATCH 101/288] feat(analytics): add type hints to analytics class Improve the `Analytics` class with proper type hints: - Add type hint to the `api_call` parameter in the `Analytics` class constructor. --- src/typesense/analytics.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/typesense/analytics.py b/src/typesense/analytics.py index bec4cbf..b29eee0 100644 --- a/src/typesense/analytics.py +++ b/src/typesense/analytics.py @@ -1,6 +1,8 @@ +from typesense.api_call import ApiCall + from .analytics_rules import AnalyticsRules class Analytics(object): - def __init__(self, api_call): + def __init__(self, api_call: ApiCall) -> None: self.rules = AnalyticsRules(api_call) From 8052ce33da052a6f03dc43fa6174c0e686c040d0 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 15:03:21 +0300 Subject: [PATCH 102/288] test(analytics): add tests for analytics class - Introduce `analytics_test.py` to validate the `Analytics` class. - Include tests for initializing analytics. 
--- tests/analytics_test.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 tests/analytics_test.py diff --git a/tests/analytics_test.py b/tests/analytics_test.py new file mode 100644 index 0000000..11790e5 --- /dev/null +++ b/tests/analytics_test.py @@ -0,0 +1,19 @@ +"""Tests for the Analytics class.""" + +from tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from typesense.analytics import Analytics +from typesense.api_call import ApiCall + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Analytics object is initialized correctly.""" + analytics = Analytics(fake_api_call) + + assert_match_object(analytics.rules.api_call, fake_api_call) + assert_object_lists_match(analytics.rules.api_call.nodes, fake_api_call.nodes) + assert_match_object( + analytics.rules.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + + assert not analytics.rules.rules From 380060bcc38a5f97dad2438970834dab3ec04c48 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 15:07:17 +0300 Subject: [PATCH 103/288] feat(stopwords): add types for stopwords Add stopword types for Typesense Python Client - Introduce new `Stopword` schemas including creation, retrieval, and deletion. - Implement `StopwordCreateSchema`, `StopwordSchema`, `StopwordsSingleRetrieveSchema`, `StopwordsRetrieveSchema`, and `StopwordDeleteSchema` using `TypedDict`. - Ensure compatibility with Python 3.11 and earlier versions using `typing_extensions`. 
--- src/typesense/types/stopword.py | 66 +++++++++++++++++++++++++++++++++ 1 file changed, 66 insertions(+) create mode 100644 src/typesense/types/stopword.py diff --git a/src/typesense/types/stopword.py b/src/typesense/types/stopword.py new file mode 100644 index 0000000..273adb4 --- /dev/null +++ b/src/typesense/types/stopword.py @@ -0,0 +1,66 @@ +"""Stopword types for Typesense Python Client.""" + +import sys + +from typesense.types.collection import Locales + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class StopwordCreateSchema(typing.TypedDict): + """ + Schema for creating a new stopword. + + Attributes: + stopwords (list[str]): The stopwords to be added. + """ + + stopwords: typing.List[str] + locale: typing.NotRequired[Locales] + + +class StopwordSchema(StopwordCreateSchema): + """ + Schema for a stopword. + + Attributes: + stopwords (list[str]): The stopwords to be added. + """ + + id: str + + +class StopwordsSingleRetrieveSchema(typing.TypedDict): + """ + Response schema for retrieving a single stopword. + + Attributes: + stopwords (StopwordSchema): The Stopword. + """ + + stopwords: StopwordSchema + + +class StopwordsRetrieveSchema(typing.TypedDict): + """ + Response schema for retrieving stopwords. + + Attributes: + stopwords (list[str]): The list of stopwords. + """ + + stopwords: typing.List[StopwordSchema] + + +class StopwordDeleteSchema(typing.TypedDict): + """ + Response schema for deleting a stopword. + + Attributes: + id (str): The ID of the stopword. + """ + + id: str From 740309d73318c7eb533cf039ef65588a5eb15fe3 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 15:10:27 +0300 Subject: [PATCH 104/288] feat(test-utils): add fixtures for stopwords Add fixtures for managing stopwords in Typesense tests - Introduce `create_stopword` fixture to create stopwords in the Typesense server. - Add `delete_all_stopwords` fixture for clearing all stopwords from the server. 
- Implement `actual_stopwords` and `actual_stopwords_set` fixtures to return `Stopwords` and `StopwordsSet` objects using a real API. - Include `fake_stopwords` and `fake_stopwords_set` fixtures for test scenarios using mocked API calls. --- tests/conftest.py | 58 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 58 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index 420efff..3add0e3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -13,6 +13,8 @@ from typesense.configuration import Configuration from typesense.override import Override from typesense.overrides import Overrides +from typesense.stopwords import Stopwords +from typesense.stopwords_set import StopwordsSet from typesense.synonym import Synonym from typesense.synonyms import Synonyms @@ -59,6 +61,19 @@ def create_collection_fixture() -> None: } response = requests.post(url, headers=headers, json=data) +@pytest.fixture(scope="function", name="create_stopword") +def create_stopword_fixture() -> None: + """Create a stopword set in the Typesense server.""" + url = "http://localhost:8108/stopwords/company_stopwords" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + stopword_data = { + "stopwords": ["and", "is", "the"], + } + + response = requests.put(url, headers=headers, json=stopword_data, timeout=3) + response.raise_for_status() + + response.raise_for_status() @@ -79,6 +94,25 @@ def clear_typesense_aliases() -> None: alias_name = alias.get("name") delete_url = f"{url}/{alias_name}" delete_response = requests.delete(delete_url, headers=headers) +@pytest.fixture(scope="function", name="delete_all_stopwords") +def clear_typesense_stopwords() -> None: + """Remove all stopwords from the Typesense server.""" + url = "http://localhost:8108/stopwords" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + # Get the list of collections + response = requests.get(url, headers=headers, timeout=3) + 
response.raise_for_status() + stopwords = response.json() + + # Delete each stopword + for stopword_set in stopwords["stopwords"]: + stopword_id = stopword_set.get("id") + delete_url = f"{url}/{stopword_id}" + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + delete_response.raise_for_status() + + @pytest.fixture(scope="function", name="delete_all_analytics_rules") def clear_typesense_analytics_rules() -> None: @@ -250,6 +284,18 @@ def actual_aliases_fixture(actual_api_call: ApiCall) -> Aliases: return Aliases(actual_api_call) +@pytest.fixture(scope="function", name="actual_stopwords") +def actual_stopwords_fixture(actual_api_call: ApiCall) -> Stopwords: + """Return a Stopwords object using a real API.""" + return Stopwords(actual_api_call) + + +@pytest.fixture(scope="function", name="actual_stopwords_set") +def actual_stopwords_set_fixture(actual_api_call: ApiCall) -> StopwordsSet: + """Return a Stopwords object using a real API.""" + return StopwordsSet(actual_api_call, "company_stopwords") + + @pytest.fixture(scope="function", name="fake_config") def fake_config_fixture() -> Configuration: """Return a Configuration object with test values.""" @@ -347,6 +393,18 @@ def fake_aliases_fixture(fake_api_call: ApiCall) -> Aliases: def fake_alias_fixture(fake_api_call: ApiCall) -> Alias: """Return a Collection object with test values.""" return Alias(fake_api_call, "company_alias") +@pytest.fixture(scope="function", name="fake_stopwords") +def fake_stopwords_fixture(fake_api_call: ApiCall) -> Stopwords: + """Return a Stopwords object with test values.""" + return Stopwords(fake_api_call) + + +@pytest.fixture(scope="function", name="fake_stopwords_set") +def fake_stopwords_set_fixture(fake_api_call: ApiCall) -> StopwordsSet: + """Return a Collection object with test values.""" + return StopwordsSet(fake_api_call, "company_stopwords") + + @pytest.fixture(scope="function", name="actual_analytics_rules") def 
actual_analytics_rules_fixture(actual_api_call: ApiCall) -> AnalyticsRules: """Return a AnalyticsRules object using a real API.""" From 8f82556d21809429e50515ca43bf2a27bf0fe3a2 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 15:11:50 +0300 Subject: [PATCH 105/288] feat(stopwords_set): add type hints to stopwords set class Improve the `StopwordsSet` class with proper type hints: - Refactor `StopwordsSet` class to add type annotations for method return values. - Introduce `StopwordSchema` and `StopwordDeleteSchema` types for responses in `retrieve` and `delete` methods, ensuring type safety. - Update method signatures to include the `as_json` and `entity_type` parameters for better response handling. - Improve internal structure by converting `_endpoint_path` to a property for cleaner access. --- src/typesense/stopwords_set.py | 28 ++++++++++++++++++++-------- 1 file changed, 20 insertions(+), 8 deletions(-) diff --git a/src/typesense/stopwords_set.py b/src/typesense/stopwords_set.py index 18052d8..ef3a7e6 100644 --- a/src/typesense/stopwords_set.py +++ b/src/typesense/stopwords_set.py @@ -1,14 +1,26 @@ -class StopwordsSet(object): - def __init__(self, api_call, stopwords_set_id): +from typesense.api_call import ApiCall +from typesense.types.stopword import StopwordDeleteSchema, StopwordsSingleRetrieveSchema + + +class StopwordsSet: + def __init__(self, api_call: ApiCall, stopwords_set_id: str) -> None: self.stopwords_set_id = stopwords_set_id self.api_call = api_call - def _endpoint_path(self): + @property + def _endpoint_path(self) -> str: from .stopwords import Stopwords - return u"{0}/{1}".format(Stopwords.RESOURCE_PATH, self.stopwords_set_id) - def retrieve(self): - return self.api_call.get(self._endpoint_path()) + return "{0}/{1}".format(Stopwords.RESOURCE_PATH, self.stopwords_set_id) + + def retrieve(self) -> StopwordsSingleRetrieveSchema: + response: StopwordsSingleRetrieveSchema = self.api_call.get( + self._endpoint_path, 
as_json=True, entity_type=StopwordsSingleRetrieveSchema + ) + return response - def delete(self): - return self.api_call.delete(self._endpoint_path()) + def delete(self) -> StopwordDeleteSchema: + response: StopwordDeleteSchema = self.api_call.delete( + self._endpoint_path, entity_type=StopwordDeleteSchema + ) + return response From 2dfec499474d08cedc7b25c9c1b956d6ceafe38d Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 15:13:01 +0300 Subject: [PATCH 106/288] test(stopwords_set): add tests for stopwords set class - Introduce comprehensive tests for the `StopwordsSet` class, covering initialization, retrieval, and deletion of stopword sets. - Utilize `requests_mock` to mock API calls, ensuring consistent test behavior without relying on external dependencies. - Include actual integration tests to verify the `StopwordsSet` operations against a live Typesense server. - Ensure tests validate the correctness of endpoint paths, request methods, and responses for both mocked and actual server interactions. 
--- tests/stopwords_set_test.py | 98 +++++++++++++++++++++++++++++++++++++ 1 file changed, 98 insertions(+) create mode 100644 tests/stopwords_set_test.py diff --git a/tests/stopwords_set_test.py b/tests/stopwords_set_test.py new file mode 100644 index 0000000..4bc21f4 --- /dev/null +++ b/tests/stopwords_set_test.py @@ -0,0 +1,98 @@ +"""Tests for the StopwordsSet class.""" + +from __future__ import annotations + +import requests_mock + +from tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from typesense.api_call import ApiCall +from typesense.stopwords import Stopwords +from typesense.stopwords_set import StopwordsSet +from typesense.types.stopword import StopwordDeleteSchema, StopwordSchema + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the StopwordsSet object is initialized correctly.""" + stopword_set = StopwordsSet(fake_api_call, "company_stopwords") + + assert stopword_set.stopwords_set_id == "company_stopwords" + assert_match_object(stopword_set.api_call, fake_api_call) + assert_object_lists_match(stopword_set.api_call.nodes, fake_api_call.nodes) + assert_match_object( + stopword_set.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert stopword_set._endpoint_path == "/stopwords/company_stopwords" # noqa: WPS437 + + +def test_retrieve(fake_stopwords_set: StopwordsSet) -> None: + """Test that the StopwordsSet object can retrieve an stopword_set.""" + json_response: StopwordSchema = { + "id": "company_stopwords", + "stopwords": ["a", "an", "the"], + } + + with requests_mock.Mocker() as mock: + mock.get( + "/stopwords/company_stopwords", + json=json_response, + ) + + response = fake_stopwords_set.retrieve() + + assert len(mock.request_history) == 1 + assert mock.request_history[0].method == "GET" + assert ( + mock.request_history[0].url + == "http://nearest:8108/stopwords/company_stopwords" + ) + assert response == json_response + + +def 
test_delete(fake_stopwords_set: StopwordsSet) -> None: + """Test that the StopwordsSet object can delete an stopword_set.""" + json_response: StopwordDeleteSchema = { + "id": "company_stopwords", + } + with requests_mock.Mocker() as mock: + mock.delete( + "/stopwords/company_stopwords", + json=json_response, + ) + + response = fake_stopwords_set.delete() + + assert len(mock.request_history) == 1 + assert mock.request_history[0].method == "DELETE" + assert ( + mock.request_history[0].url + == "http://nearest:8108/stopwords/company_stopwords" + ) + assert response == json_response + + +def test_actual_retrieve( + actual_stopwords: Stopwords, + delete_all_stopwords: None, + delete_all: None, + create_stopword: None, +) -> None: + """Test that the StopwordsSet object can retrieve an stopword_set from Typesense Server.""" + response = actual_stopwords["company_stopwords"].retrieve() + + assert response == { + "stopwords": { + "id": "company_stopwords", + "stopwords": ["and", "is", "the"], + }, + } + + +def test_actual_delete( + actual_stopwords: Stopwords, + create_stopword: None, +) -> None: + """Test that the StopwordsSet object can delete an stopword_set from Typesense Server.""" + response = actual_stopwords["company_stopwords"].delete() + + assert response == {"id": "company_stopwords"} From c4ecb152778ff4228680b122dc89087dfe0f4f85 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 15:14:00 +0300 Subject: [PATCH 107/288] feat(stopwords): add type hints to stopwords class - Add type annotations to the `Stopwords` class methods to enhance code clarity and type safety. - Update the `__getitem__` method to ensure proper initialization and retrieval of `StopwordsSet` instances, with type safety and improved dictionary handling. - Refactor the `upsert` and `retrieve` methods to utilize explicit response types, ensuring the API calls return correctly typed data. 
- Improve the overall structure of API calls by including entity type specification and JSON parsing in the methods. --- src/typesense/stopwords.py | 51 +++++++++++++++++++++++++++++--------- 1 file changed, 39 insertions(+), 12 deletions(-) diff --git a/src/typesense/stopwords.py b/src/typesense/stopwords.py index d67208d..86db41c 100644 --- a/src/typesense/stopwords.py +++ b/src/typesense/stopwords.py @@ -1,21 +1,48 @@ +import sys + +from typesense.api_call import ApiCall +from typesense.types.stopword import ( + StopwordCreateSchema, + StopwordSchema, + StopwordsRetrieveSchema, +) + from .stopwords_set import StopwordsSet +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + class Stopwords(object): - RESOURCE_PATH = '/stopwords' + RESOURCE_PATH = "/stopwords" - def __init__(self, api_call): + def __init__(self, api_call: ApiCall): self.api_call = api_call - self.stopwords_sets = {} - - def __getitem__(self, stopwords_set_id): - if stopwords_set_id not in self.stopwords_sets: - self.stopwords_sets[stopwords_set_id] = StopwordsSet(self.api_call, stopwords_set_id) + self.stopwords_sets: typing.Dict[str, StopwordsSet] = {} - return self.stopwords_sets.get(stopwords_set_id) + def __getitem__(self, stopwords_set_id: str) -> StopwordsSet: + if not self.stopwords_sets.get(stopwords_set_id): + self.stopwords_sets[stopwords_set_id] = StopwordsSet( + self.api_call, stopwords_set_id + ) + return self.stopwords_sets[stopwords_set_id] - def upsert(self, stopwords_set_id, stopwords_set): - return self.api_call.put('{}/{}'.format(Stopwords.RESOURCE_PATH, stopwords_set_id), stopwords_set) + def upsert( + self, stopwords_set_id: str, stopwords_set: StopwordCreateSchema + ) -> StopwordSchema: + response: StopwordSchema = self.api_call.put( + "{}/{}".format(Stopwords.RESOURCE_PATH, stopwords_set_id), + body=stopwords_set, + entity_type=StopwordSchema, + ) + return response - def retrieve(self): - return 
self.api_call.get('{0}'.format(Stopwords.RESOURCE_PATH)) + def retrieve(self) -> StopwordsRetrieveSchema: + response: StopwordsRetrieveSchema = self.api_call.get( + "{0}".format(Stopwords.RESOURCE_PATH), + as_json=True, + entity_type=StopwordsRetrieveSchema, + ) + return response From b6c05c16b998f32e005d7d411c8336c54db601ee Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 15:15:00 +0300 Subject: [PATCH 108/288] test(stopwords): add tests for stopwords class - Introduce tests to ensure proper initialization of the `Stopwords` class, including API call integration. - Add tests for retrieving, creating, and updating stopwords, with mock requests and actual server interactions. - Validate the functionality of fetching both new and existing stopword sets, ensuring correct behavior of the `Stopwords` class. - Ensure test coverage for all primary methods, including `retrieve` and `upsert`. --- tests/stopwords_test.py | 162 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 162 insertions(+) create mode 100644 tests/stopwords_test.py diff --git a/tests/stopwords_test.py b/tests/stopwords_test.py new file mode 100644 index 0000000..4c12847 --- /dev/null +++ b/tests/stopwords_test.py @@ -0,0 +1,162 @@ +"""Tests for the Stopwords class.""" + +from __future__ import annotations + +import requests_mock + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from typesense.api_call import ApiCall +from typesense.stopwords import Stopwords +from typesense.types.stopword import StopwordSchema, StopwordsRetrieveSchema + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Stopwords object is initialized correctly.""" + stopwords = Stopwords(fake_api_call) + + assert_match_object(stopwords.api_call, fake_api_call) + assert_object_lists_match(stopwords.api_call.nodes, fake_api_call.nodes) + assert_match_object( + stopwords.api_call.config.nearest_node, + 
fake_api_call.config.nearest_node, + ) + + assert not stopwords.stopwords_sets + + +def test_get_missing_stopword(fake_stopwords: Stopwords) -> None: + """Test that the Stopwords object can get a missing stopword.""" + stopword = fake_stopwords["company_stopwords"] + + assert stopword.stopwords_set_id == "company_stopwords" + assert_match_object(stopword.api_call, fake_stopwords.api_call) + assert_object_lists_match(stopword.api_call.nodes, fake_stopwords.api_call.nodes) + assert_match_object( + stopword.api_call.config.nearest_node, + fake_stopwords.api_call.config.nearest_node, + ) + assert stopword._endpoint_path == "/stopwords/company_stopwords" # noqa: WPS437 + + +def test_get_existing_stopword(fake_stopwords: Stopwords) -> None: + """Test that the Stopwords object can get an existing stopword.""" + stopword = fake_stopwords["company_stopwords"] + fetched_stopword = fake_stopwords["company_stopwords"] + + assert len(fake_stopwords.stopwords_sets) == 1 + + assert stopword is fetched_stopword + + +def test_retrieve(fake_stopwords: Stopwords) -> None: + """Test that the Stopwords object can retrieve stopwords.""" + json_response: StopwordsRetrieveSchema = { + "stopwords": [ + { + "id": "company_stopwords", + "locale": "", + "stopwords": ["and", "is", "the"], + }, + ], + } + + with requests_mock.Mocker() as mock: + mock.get( + "http://nearest:8108/stopwords", + json=json_response, + ) + + response = fake_stopwords.retrieve() + + assert len(response) == 1 + assert response["stopwords"][0] == json_response["stopwords"][0] + assert response == json_response + + +def test_create(fake_stopwords: Stopwords) -> None: + """Test that the Stopwords object can create a stopword.""" + json_response: StopwordSchema = { + "id": "company_stopwords", + "locale": "", + "stopwords": ["and", "is", "the"], + } + + with requests_mock.Mocker() as mock: + mock.put( + 
"http://nearest:8108/stopwords/company_stopwords", + json=json_response, + ) + + fake_stopwords.upsert( + "company_stopwords", + {"stopwords": ["and", "is", "the"]}, + ) + + assert mock.call_count == 1 + assert mock.called is True + assert mock.last_request.method == "PUT" + assert ( + mock.last_request.url == "http://nearest:8108/stopwords/company_stopwords" + ) + assert mock.last_request.json() == {"stopwords": ["and", "is", "the"]} + + +def test_actual_create(actual_stopwords: Stopwords, delete_all_stopwords: None) -> None: + """Test that the Stopwords object can create an stopword on Typesense Server.""" + response = actual_stopwords.upsert( + "company_stopwords", + {"stopwords": ["and", "is", "the"]}, + ) + + assert response == { + "id": "company_stopwords", + "stopwords": ["and", "is", "the"], + } + + +def test_actual_update( + actual_stopwords: Stopwords, + delete_all_stopwords: None, +) -> None: + """Test that the Stopwords object can update an stopword on Typesense Server.""" + create_response = actual_stopwords.upsert( + "company_stopwords", + {"stopwords": ["and", "is", "the"]}, + ) + + assert create_response == { + "id": "company_stopwords", + "stopwords": ["and", "is", "the"], + } + + update_response = actual_stopwords.upsert( + "company_stopwords", + {"stopwords": ["and", "is", "other"]}, + ) + + assert update_response == { + "id": "company_stopwords", + "stopwords": ["and", "is", "other"], + } + + +def test_actual_retrieve( + delete_all_stopwords: None, + create_stopword: None, + actual_stopwords: Stopwords, +) -> None: + """Test that the Stopwords object can retrieve an stopword from Typesense Server.""" + response = actual_stopwords.retrieve() + + assert len(response["stopwords"]) == 1 + assert_to_contain_object( + response["stopwords"][0], + { + "id": "company_stopwords", + "stopwords": ["and", "is", "the"], + }, + ) From ee81a0a0423bca7ec5bc3d490865efc9742598ea 
Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 15:17:33 +0300 Subject: [PATCH 109/288] feat(operations): add types for operations - Introduce `SnapshotParameters` for specifying snapshot creation details. - Define `LogSlowRequestsTimeParams` to capture parameters for logging slow requests. - Add `HealthCheckResponse` and `OperationResponse` schemas to standardize responses for health checks and general operations. - Use `TypedDict` to ensure type safety and consistency in handling these structures. --- src/typesense/types/operations.py | 52 +++++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) create mode 100644 src/typesense/types/operations.py diff --git a/src/typesense/types/operations.py b/src/typesense/types/operations.py new file mode 100644 index 0000000..566f517 --- /dev/null +++ b/src/typesense/types/operations.py @@ -0,0 +1,52 @@ +"""Types for operations.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class SnapshotParameters(typing.TypedDict): + """ + Parameters for creating a snapshot. + + Attributes: + snapshot_path (str): The path where the snapshot is stored. + """ + + snapshot_path: str + + +class LogSlowRequestsTimeParams(typing.TypedDict): + """ + Parameters for logging slow requests. + + Attributes: + log_slow_requests_time_ms (int): The time in milliseconds to log slow requests. + """ + + log_slow_requests_time_ms: int + + +class HealthCheckResponse(typing.TypedDict): + """ + Response schema for the health check. + + Attributes: + ok (bool): The status of the health check. + """ + + ok: bool + + +class OperationResponse(typing.TypedDict): + """ + Response schema for operations. + + Attributes: + success (bool): The status of the operation. 
+ """ + + success: bool From 069aadf10026f95eff1e6b7291be94a7c3b143bb Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 15:19:51 +0300 Subject: [PATCH 110/288] feat(test-utils): add fixtures for operations - Introduce `actual_operations` fixture to create a real `Operations` object using the API. - Add `fake_operations` fixture to provide a mock `Operations` object for testing purposes. - Enhance test coverage for operations by utilizing these fixtures in the test suite. --- tests/conftest.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index 3add0e3..c99e88b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -11,6 +11,7 @@ from typesense.collection import Collection from typesense.collections import Collections from typesense.configuration import Configuration +from typesense.operations import Operations from typesense.override import Override from typesense.overrides import Overrides from typesense.stopwords import Stopwords @@ -296,6 +297,12 @@ def actual_stopwords_set_fixture(actual_api_call: ApiCall) -> StopwordsSet: return StopwordsSet(actual_api_call, "company_stopwords") +@pytest.fixture(scope="function", name="actual_operations") +def actual_operations_fixture(actual_api_call: ApiCall) -> Operations: + """Return a Operations object using a real API.""" + return Operations(actual_api_call) + + @pytest.fixture(scope="function", name="fake_config") def fake_config_fixture() -> Configuration: """Return a Configuration object with test values.""" @@ -418,3 +425,9 @@ def fake_analytics_rule_fixture(fake_api_call: ApiCall) -> AnalyticsRule: return AnalyticsRule(fake_api_call, "company_analytics_rule") +@pytest.fixture(scope="function", name="fake_operations") +def fake_operations_fixture(fake_api_call: ApiCall) -> Operations: + """Return a Collection object with test values.""" + return Operations(fake_api_call) + + From 086d31e83ae74a366a37709bdecdb000644718cf Mon Sep 17 
00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 15:21:33 +0300 Subject: [PATCH 111/288] feat(operations): add type hints to operations class - Enhance type hinting in the `Operations` class methods, including the `perform`, `is_healthy`, and `toggle_slow_request_log` methods. - Consolidate duplicate paths and improve method signatures for better clarity and usability. - Ensure compatibility with Python 3.11 type features. --- src/typesense/operations.py | 107 +++++++++++++++++++++++++++++++----- 1 file changed, 94 insertions(+), 13 deletions(-) diff --git a/src/typesense/operations.py b/src/typesense/operations.py index 46c3e00..a743c39 100644 --- a/src/typesense/operations.py +++ b/src/typesense/operations.py @@ -1,23 +1,104 @@ +import sys -from ctypes import Union +from typesense.types.operations import ( + HealthCheckResponse, + LogSlowRequestsTimeParams, + OperationResponse, + SnapshotParameters, +) + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from typesense import configuration +from typesense.api_call import ApiCall +from typesense.configuration import Configuration class Operations(object): - RESOURCE_PATH = '/operations' - HEALTH_PATH = '/health' + RESOURCE_PATH = "/operations" + HEALTH_PATH = "/health" + CONFIG_PATH = "/config" - def __init__(self, api_call): + def __init__(self, api_call: ApiCall): self.api_call = api_call @staticmethod - def _endpoint_path(operation_name): - return u"{0}/{1}".format(Operations.RESOURCE_PATH, operation_name) + def _endpoint_path(operation_name: str) -> str: + return "{0}/{1}".format(Operations.RESOURCE_PATH, operation_name) + + @typing.overload + def perform( + self, + operation_name: typing.Literal["vote"], + query_params: None = None, + ) -> OperationResponse: ... + + @typing.overload + def perform( + self, + operation_name: typing.Literal["db/compact"], + query_params: None = None, + ) -> OperationResponse: ... 
+ + @typing.overload + def perform( + self, + operation_name: typing.Literal["cache/clear"], + query_params: None = None, + ) -> OperationResponse: ... + + @typing.overload + def perform( + self, + operation_name: str, + query_params: typing.Union[typing.Dict[str, str], None] = None, + ) -> OperationResponse: ... + + @typing.overload + def perform( + self, + operation_name: typing.Literal["snapshot"], + query_params: SnapshotParameters, + ) -> OperationResponse: ... + + def perform( + self, + operation_name: typing.Union[ + typing.Literal["snapshot, vote, db/compact, cache/clear"], str + ], + query_params: typing.Union[ + SnapshotParameters, typing.Dict[str, str], None + ] = None, + ) -> OperationResponse: + response: OperationResponse = self.api_call.post( + self._endpoint_path(operation_name), + params=query_params, + as_json=True, + entity_type=OperationResponse, + ) + return response + def is_healthy(self) -> bool: + call_resp = self.api_call.get( + Operations.HEALTH_PATH, as_json=True, entity_type=HealthCheckResponse + ) + if isinstance(call_resp, typing.Dict): + is_ok: bool = call_resp.get("ok", False) + else: + is_ok = False + return is_ok - def perform(self, operation_name, query_params=None): - query_params = query_params or {} - return self.api_call.post(self._endpoint_path(operation_name), {}, query_params) - - def is_healthy(self)-> bool: - call_resp = self.api_call.get(Operations.HEALTH_PATH) - return call_resp.get("ok", False) if isinstance(call_resp, dict) else False + def toggle_slow_request_log( + self, data: LogSlowRequestsTimeParams + ) -> typing.Dict[str, typing.Union[str, bool]]: + data_dashed = {key.replace("_", "-"): value for key, value in data.items()} + response: typing.Dict[str, typing.Union[str, bool]] = self.api_call.post( + Operations.CONFIG_PATH, + as_json=True, + entity_type=typing.Dict[str, typing.Union[str, bool]], + body=data_dashed, + ) + return response From 8179a40f2f01d0026ef6ab18d755168494846ace Mon Sep 17 00:00:00 2001 From: 
Fanis Tharropoulos Date: Fri, 23 Aug 2024 15:22:37 +0300 Subject: [PATCH 112/288] test(operations): add tests for operations class - Introduce comprehensive tests for the `Operations` class, verifying the behavior of methods such as `perform`, `is_healthy`, and `toggle_slow_request_log`. - Ensure correct handling of both valid and invalid operations, improving test coverage and reliability of the Operations API. --- tests/operations_test.py | 92 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 92 insertions(+) create mode 100644 tests/operations_test.py diff --git a/tests/operations_test.py b/tests/operations_test.py new file mode 100644 index 0000000..12b5853 --- /dev/null +++ b/tests/operations_test.py @@ -0,0 +1,92 @@ +"""Tests for the Operations class.""" + +from __future__ import annotations + +import pytest +import requests_mock + +from tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from typesense.api_call import ApiCall +from typesense.exceptions import ObjectNotFound +from typesense.operations import Operations + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Override object is initialized correctly.""" + operations = Operations(fake_api_call) + + assert_match_object(operations.api_call, fake_api_call) + assert_object_lists_match(operations.api_call.nodes, fake_api_call.nodes) + assert_match_object( + operations.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert ( + operations._endpoint_path("resource") == "/operations/resource" # noqa: WPS437 + ) + + +def test_vote(actual_operations: Operations) -> None: + """Test that the Operations object can perform the vote operation.""" + response = actual_operations.perform("vote") + + # It will error on single node clusters if asserted to True + assert response["success"] is not None + + +def test_db_compact(actual_operations: Operations) -> None: + """Test that the Operations object can perform the db/compact 
operation.""" + response = actual_operations.perform("db/compact") + + assert response["success"] + + +def test_cache_clear(actual_operations: Operations) -> None: + """Test that the Operations object can perform the cache/clear operation.""" + response = actual_operations.perform("cache/clear") + + assert response["success"] + + +def test_snapshot(actual_operations: Operations) -> None: + """Test that the Operations object can perform the snapshot operation.""" + response = actual_operations.perform( + "snapshot", + {"snapshot_path": "/tmp"}, # noqa: S108 + ) + + assert response["success"] + + +def test_health(actual_operations: Operations) -> None: + """Test that the Operations object can perform the health operation.""" + response = actual_operations.is_healthy() + + assert response + + +def test_health_not_dict(fake_operations: Operations) -> None: + """Test that the Operations object can perform the health operation.""" + with requests_mock.Mocker() as mock: + mock.get( + "/health", + json="ok", + ) + + response = fake_operations.is_healthy() + assert not response + + +def test_log_slow_requests_time_ms(actual_operations: Operations) -> None: + """Test that the Operations object can perform the log_slow_requests_time_ms operation.""" + response = actual_operations.toggle_slow_request_log( + {"log_slow_requests_time_ms": 100}, + ) + + assert response["success"] + + +def test_invalid_operation(actual_operations: Operations) -> None: + """Test that the Operations object throws an error for an invalid operation.""" + with pytest.raises(ObjectNotFound): + actual_operations.perform("invalid") From f9541e65cf23f52c53648c9e02fdd10a0cec3fe4 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 15:46:18 +0300 Subject: [PATCH 113/288] feat(keys): add types for api keys - Introduce typing for API key actions, including collections, documents, and synonyms. 
- Define schemas for creating, retrieving, and deleting API keys, ensuring type safety and clarity in the API key management process. --- src/typesense/types/key.py | 164 +++++++++++++++++++++++++++++++++++++ 1 file changed, 164 insertions(+) create mode 100644 src/typesense/types/key.py diff --git a/src/typesense/types/key.py b/src/typesense/types/key.py new file mode 100644 index 0000000..51cb2a0 --- /dev/null +++ b/src/typesense/types/key.py @@ -0,0 +1,164 @@ +"""Types for API keys.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +_CollectionActions = typing.Literal[ + "collections:list", + "collections:get", + "collections:delete", + "collections:create", + "collections:*", +] + +_DocumentActions = typing.Literal[ + "documents:*", + "documents:export", + "documents:import", + "documents:delete", + "documents:update", + "documents:upsert", + "documents:create", + "documents:get", + "documents:search", +] + +_AliasActions = typing.Literal[ + "aliases:*", + "aliases:delete", + "aliases:create", + "aliases:get", + "aliases:list", +] + +_SynonymActions = typing.Literal[ + "synonyms:*", + "synonyms:delete", + "synonyms:create", + "synonyms:get", + "synonyms:list", +] + +_OverrideActions = typing.Literal[ + "overrides:*", + "overrides:delete", + "overrides:create", + "overrides:get", + "overrides:list", +] + +_StopwordActions = typing.Literal[ + "stopwords:*", + "stopwords:delete", + "stopwords:create", + "stopwords:get", + "stopwords:list", +] + +_KeyActions = typing.Literal[ + "keys:*", + "keys:delete", + "keys:create", + "keys:get", + "keys:list", +] + +_MiscActions = typing.Literal[ + "*", + "debug:list", + "stats.json:list", + "metrics.json:list", +] + +_Actions = typing.Union[ + _CollectionActions, + _DocumentActions, + _AliasActions, + _SynonymActions, + _OverrideActions, + _StopwordActions, + _KeyActions, + _MiscActions, +] + + +class ApiKeyCreateSchema(typing.TypedDict): + """ + Schema for 
creating a [new API key](https://typesense.org/docs/26.0/api/api-keys.html#create-an-api-key). + + Attributes: + actions (list[str]): The actions allowed for this key. + collections (list[str]): The collections this key has access to. + description (str): The description for this key. + value (str): The value of the key. + expires_at (int): The time in UNIX timestamp format when the key will expire. + autodelete (bool): Whether the key should be deleted after it expires. + """ + + actions: typing.List[_Actions] + collections: typing.List[str] + description: str + value: typing.NotRequired[str] + expires_at: typing.NotRequired[int] + autodelete: typing.NotRequired[bool] + + +class ApiKeyCreateResponseSchema(ApiKeyCreateSchema): + """ + Response schema for creating a [new API key](https://typesense.org/docs/26.0/api/api-keys.html#create-an-api-key). + + Attributes: + id (int): The ID of the key. + + Plus all the attributes from `ApiKeyCreateSchema`. + """ + + id: int + + +class ApiKeySchema(typing.TypedDict): + """ + Response schema for an [API key](https://typesense.org/docs/26.0/api/api-keys.html#retrieve-an-api-key). + + Attributes: + actions (list[str]): The actions allowed for this key. + collections (list[str]): The collections this key has access to. + description (str): The description for this key. + id (int): The ID of the key. + value_prefix (str): The value prefix of the key. + expires_at (int): The time in UNIX timestamp format when the key + """ + + actions: typing.List[_Actions] + collections: typing.List[str] + description: str + id: int + value_prefix: str + expires_at: int + + +class ApiKeyRetrieveSchema(typing.TypedDict): + """ + Response schema for retrieving [API keys](https://typesense.org/docs/26.0/api/api-keys.html#list-all-keys). 
+ + Attributes: + keys (list[ApiKeySchema]): The list of keys. + """ + + keys: typing.List[ApiKeySchema] + + +class ApiKeyDeleteSchema(typing.TypedDict): + """ + Response schema for deleting an [API key](https://typesense.org/docs/26.0/api/api-keys.html#delete-api-key). + + Attributes: + id (int): The ID of the key. + """ + + id: int From 0dff0380e928a9870ce120c5973ada0978d3b82e Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 15:47:49 +0300 Subject: [PATCH 114/288] feat(test-utils): add fixtures for api keys - Add fixtures for creating, deleting, and managing API keys in the Typesense server. - Improve test setup for keys to facilitate better isolation and management of key-related tests. --- tests/conftest.py | 52 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index c99e88b..74d5283 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -11,6 +11,8 @@ from typesense.collection import Collection from typesense.collections import Collections from typesense.configuration import Configuration +from typesense.key import Key +from typesense.keys import Keys from typesense.operations import Operations from typesense.override import Override from typesense.overrides import Overrides @@ -75,7 +77,21 @@ def create_stopword_fixture() -> None: response.raise_for_status() +@pytest.fixture(scope="function", name="create_key_id") +def create_key_fixture() -> int: + """Create a key set in the Typesense server.""" + url = "http://localhost:8108/keys" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + api_key_data = { + "actions": ["documents:search"], + "collections": ["companies"], + "description": "Search-only key", + } + + response = requests.post(url, headers=headers, json=api_key_data, timeout=3) response.raise_for_status() + key_id: int = response.json()["id"] + return key_id 
@pytest.fixture(scope="function", name="delete_all_aliases") @@ -114,6 +130,25 @@ def clear_typesense_stopwords() -> None: delete_response.raise_for_status() +@pytest.fixture(scope="function", name="delete_all_keys") +def clear_typesense_keys() -> None: + """Remove all keys from the Typesense server.""" + url = "http://localhost:8108/keys" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + # Get the list of collections + response = requests.get(url, headers=headers, timeout=3) + response.raise_for_status() + + keys = response.json() + + # Delete each key + for key in keys["keys"]: + key_name = key.get("id") + delete_url = f"{url}/{key_name}" + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + delete_response.raise_for_status() + @pytest.fixture(scope="function", name="delete_all_analytics_rules") def clear_typesense_analytics_rules() -> None: @@ -418,7 +453,12 @@ def actual_analytics_rules_fixture(actual_api_call: ApiCall) -> AnalyticsRules: return AnalyticsRules(actual_api_call) +@pytest.fixture(scope="function", name="actual_keys") +def actual_keys_fixture(actual_api_call: ApiCall) -> Keys: + """Return a Keys object using a real API.""" return Keys(actual_api_call) + + @pytest.fixture(scope="function", name="fake_analytics_rule") def fake_analytics_rule_fixture(fake_api_call: ApiCall) -> AnalyticsRule: """Return a Collection object with test values.""" @@ -431,3 +471,15 @@ def fake_operations_fixture(fake_api_call: ApiCall) -> Operations: return Operations(fake_api_call) +@pytest.fixture(scope="function", name="fake_keys") +def fake_keys_fixture(fake_api_call: ApiCall) -> Keys: + """Return a AnalyticsRule object with test values.""" + return Keys(fake_api_call) + + +@pytest.fixture(scope="function", name="fake_key") +def fake_key_fixture(fake_api_call: ApiCall) -> Key: + """Return a Key object with test values.""" + return Key(fake_api_call, 1) + + From b67739b9a789ab2106ae012bf61c40ede7f9fa7d Mon 
Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 15:48:45 +0300 Subject: [PATCH 115/288] feat(keys): add type hints to keys class - Update method signatures in the Keys class to include appropriate type hints for better code clarity and improved IDE support. - Enhance the implementation of `generate_scoped_search_key` and `retrieve` methods to align with Python type annotation standards. --- src/typesense/keys.py | 56 ++++++++++++++++++++++++++++++++----------- 1 file changed, 42 insertions(+), 14 deletions(-) diff --git a/src/typesense/keys.py b/src/typesense/keys.py index ea903dc..a183be5 100644 --- a/src/typesense/keys.py +++ b/src/typesense/keys.py @@ -2,35 +2,63 @@ import hashlib import hmac import json +import sys + +from typesense import key +from typesense.api_call import ApiCall +from typesense.types.document import GenerateScopedSearchKeyParams +from typesense.types.key import ( + ApiKeyCreateResponseSchema, + ApiKeyCreateSchema, + ApiKeyRetrieveSchema, + ApiKeySchema, +) from .key import Key +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + class Keys(object): - RESOURCE_PATH = '/keys' + RESOURCE_PATH = "/keys" - def __init__(self, api_call): + def __init__(self, api_call: ApiCall) -> None: self.api_call = api_call - self.keys = {} + self.keys: typing.Dict[int, Key] = {} - def __getitem__(self, key_id): - if key_id not in self.keys: + def __getitem__(self, key_id: int) -> Key: + if not self.keys.get(key_id): self.keys[key_id] = Key(self.api_call, key_id) - return self.keys.get(key_id) + return self.keys[key_id] - def create(self, schema): - return self.api_call.post(Keys.RESOURCE_PATH, schema) + def create(self, schema: ApiKeyCreateSchema) -> ApiKeyCreateResponseSchema: + response: ApiKeySchema = self.api_call.post( + Keys.RESOURCE_PATH, as_json=True, body=schema, entity_type=ApiKeySchema + ) + return response - def generate_scoped_search_key(self, search_key, parameters): + def 
generate_scoped_search_key( + self, search_key: str, parameters: GenerateScopedSearchKeyParams + ) -> bytes: # Note: only a key generated with the `documents:search` action will be accepted by the server params_str = json.dumps(parameters) digest = base64.b64encode( - hmac.new(search_key.encode('utf-8'), params_str.encode('utf-8'), digestmod=hashlib.sha256).digest() + hmac.new( + search_key.encode("utf-8"), + params_str.encode("utf-8"), + digestmod=hashlib.sha256, + ).digest() ) key_prefix = search_key[0:4] - raw_scoped_key = '{}{}{}'.format(digest.decode('utf-8'), key_prefix, params_str) - return base64.b64encode(raw_scoped_key.encode('utf-8')) + raw_scoped_key = "{}{}{}".format(digest.decode("utf-8"), key_prefix, params_str) + return base64.b64encode(raw_scoped_key.encode("utf-8")) - def retrieve(self): - return self.api_call.get('{0}'.format(Keys.RESOURCE_PATH)) + def retrieve(self) -> ApiKeyRetrieveSchema: + response: ApiKeyRetrieveSchema = self.api_call.get( + Keys.RESOURCE_PATH, entity_type=ApiKeyRetrieveSchema, as_json=True + ) + return response From 0b7b4956cbd48019093de01da155d0ab3a772cc1 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 15:49:39 +0300 Subject: [PATCH 116/288] test(keys): add tests for keys class - Implement comprehensive unit tests for the `Keys` class, covering initialization, retrieval, creation, and key generation functionalities. These tests ensure robust validation of the Keys class methods against the Typesense API behavior. 
--- tests/keys_test.py | 204 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 204 insertions(+) create mode 100644 tests/keys_test.py diff --git a/tests/keys_test.py b/tests/keys_test.py new file mode 100644 index 0000000..b3538c2 --- /dev/null +++ b/tests/keys_test.py @@ -0,0 +1,204 @@ +"""Tests for the Keys class.""" + +from __future__ import annotations + +import base64 +import hashlib +import hmac +import json +import time + +import requests_mock + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from typesense.api_call import ApiCall +from typesense.keys import Keys +from typesense.types.key import ApiKeyRetrieveSchema + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Keys object is initialized correctly.""" + keys = Keys(fake_api_call) + + assert_match_object(keys.api_call, fake_api_call) + assert_object_lists_match(keys.api_call.nodes, fake_api_call.nodes) + assert_match_object( + keys.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + + assert not keys.keys + + +def test_get_missing_key(fake_keys: Keys) -> None: + """Test that the Keys object can get a missing key.""" + key = fake_keys[1] + + assert_match_object(key.api_call, fake_keys.api_call) + assert_object_lists_match(key.api_call.nodes, fake_keys.api_call.nodes) + assert_match_object( + key.api_call.config.nearest_node, + fake_keys.api_call.config.nearest_node, + ) + assert key._endpoint_path == "/keys/1" # noqa: WPS437 + + +def test_get_existing_key(fake_keys: Keys) -> None: + """Test that the Keys object can get an existing key.""" + key = fake_keys[1] + fetched_key = fake_keys[1] + + assert len(fake_keys.keys) == 1 + + assert key is fetched_key + + +def test_retrieve(fake_keys: Keys) -> None: + """Test that the Keys object can retrieve keys.""" + json_response: ApiKeyRetrieveSchema = { + "keys": [ + { + "actions": ["documents:search"], + "collections": 
["companies"], + "description": "Search-only key", + "expires_at": int(time.time()) + 3600, + "id": 1, + "value_prefix": "asdf", + }, + ], + } + + with requests_mock.Mocker() as mock: + mock.get( + "http://nearest:8108/keys", + json=json_response, + ) + + response = fake_keys.retrieve() + + assert len(response) == 1 + assert response["keys"][0] == json_response.get("keys")[0] + assert response == json_response + + +def test_create(fake_keys: Keys) -> None: + """Test that the Keys object can create a key.""" + json_response: ApiKeyRetrieveSchema = { + "keys": [ + { + "actions": ["documents:search"], + "collections": ["companies"], + "description": "Search-only key", + "expires_at": int(time.time()) + 3600, + "id": 1, + "value_prefix": "asdf", + }, + ], + } + + with requests_mock.Mocker() as mock: + mock.post( + "http://nearest:8108/keys", + json=json_response, + ) + + fake_keys.create( + schema={ + "actions": ["documents:search"], + "collections": ["companies"], + }, + ) + + assert mock.call_count == 1 + assert mock.called is True + assert mock.last_request.method == "POST" + assert mock.last_request.url == "http://nearest:8108/keys" + assert mock.last_request.json() == { + "actions": ["documents:search"], + "collections": ["companies"], + } + + +def test_actual_create( + actual_keys: Keys, +) -> None: + """Test that the Keys object can create an key on Typesense Server.""" + response = actual_keys.create( + { + "actions": ["documents:search"], + "collections": ["companies"], + "description": "Search-only key", + }, + ) + + assert_to_contain_object( + response, + { + "actions": ["documents:search"], + "collections": ["companies"], + "description": "Search-only key", + "autodelete": False, + }, + ) + + +def test_actual_retrieve( + actual_keys: Keys, + delete_all: None, + delete_all_keys: None, + create_key_id: int, +) -> None: + """Test 
that the Keys object can retrieve an key from Typesense Server.""" + response = actual_keys.retrieve() + assert len(response["keys"]) == 1 + assert_to_contain_object( + response["keys"][0], + { + "actions": ["documents:search"], + "collections": ["companies"], + "description": "Search-only key", + "autodelete": False, + "id": create_key_id, + }, + ) + + +def test_generate_scoped_search_key( + fake_keys: Keys, +) -> None: + """Test that the Keys object can generate a scoped search key.""" + # Use a real key that works on Typesense server + search_key = "KmacipDKNqAM3YiigXfw5pZvNOrPQUba" + search_parameters = { + "q": "search query", + "collection": "companies", + "filter_by": "num_employees:>10", + } + + key = fake_keys.generate_scoped_search_key(search_key, search_parameters) + + decoded_key = base64.b64decode(key).decode("utf-8") + + extracted_key = { + "digest": decoded_key[:44], + "key_prefix": decoded_key[44:48], + "params_str": decoded_key[48:], + } + assert extracted_key["key_prefix"] == search_key[:4] + + expected_params_str = json.dumps(search_parameters) + assert extracted_key["params_str"] == expected_params_str + + recomputed_digest = base64.b64encode( + hmac.new( + search_key.encode("utf-8"), + expected_params_str.encode("utf-8"), + digestmod=hashlib.sha256, + ).digest(), + ).decode("utf-8") + + assert extracted_key["digest"] == recomputed_digest From b6dcbb2f8f697d13993be47c463d728b887e4eef Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 15:50:39 +0300 Subject: [PATCH 117/288] feat(key): add type hints to key class - Update the Key class to include type hints for better clarity and maintenance. - Change the `_endpoint_path` method to a property for a more Pythonic approach, enhancing the code readability and consistency. 
--- src/typesense/key.py | 24 +++++++++++++++++------- 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/src/typesense/key.py b/src/typesense/key.py index d750598..12d3ab6 100644 --- a/src/typesense/key.py +++ b/src/typesense/key.py @@ -1,16 +1,26 @@ +from typesense.api_call import ApiCall +from typesense.types.key import ApiKeyDeleteSchema, ApiKeySchema class Key(object): - def __init__(self, api_call, key_id): + def __init__(self, api_call: ApiCall, key_id: int) -> None: self.key_id = key_id self.api_call = api_call - def _endpoint_path(self): + @property + def _endpoint_path(self) -> str: from .keys import Keys - return u"{0}/{1}".format(Keys.RESOURCE_PATH, self.key_id) + return "{0}/{1}".format(Keys.RESOURCE_PATH, self.key_id) - def retrieve(self): - return self.api_call.get(self._endpoint_path()) + def retrieve(self) -> ApiKeySchema: + response: ApiKeySchema = self.api_call.get( + self._endpoint_path, as_json=True, entity_type=ApiKeySchema + ) + return response + + def delete(self) -> ApiKeyDeleteSchema: + response: ApiKeyDeleteSchema = self.api_call.delete( + self._endpoint_path, entity_type=ApiKeyDeleteSchema + ) + return response From 71fc68817244f46a04b376b089f8f93e6c0d188f Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 15:51:37 +0300 Subject: [PATCH 118/288] test(key): add tests for key class - Implement tests for initializing, retrieving, and deleting keys in the Key class. - These tests ensure correct interaction with the Typesense API and validate expected behavior of the Key class methods. 
--- tests/key_test.py | 100 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 100 insertions(+) create mode 100644 tests/key_test.py diff --git a/tests/key_test.py b/tests/key_test.py new file mode 100644 index 0000000..4603f94 --- /dev/null +++ b/tests/key_test.py @@ -0,0 +1,100 @@ +"""Tests for the Key class.""" + +from __future__ import annotations + +import requests_mock + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from typesense.api_call import ApiCall +from typesense.key import Key +from typesense.keys import Keys +from typesense.types.key import ApiKeyDeleteSchema, ApiKeySchema + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Key object is initialized correctly.""" + key = Key(fake_api_call, 3) + + assert key.key_id == 3 + assert_match_object(key.api_call, fake_api_call) + assert_object_lists_match(key.api_call.nodes, fake_api_call.nodes) + assert_match_object( + key.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert key._endpoint_path == "/keys/3" # noqa: WPS437 + + +def test_retrieve(fake_key: Key) -> None: + """Test that the Key object can retrieve a key.""" + json_response: ApiKeySchema = { + "actions": ["documents:search"], + "collections": ["companies"], + "description": "Search-only key", + } + + with requests_mock.Mocker() as mock: + mock.get( + "/keys/1", + json=json_response, + ) + + response = fake_key.retrieve() + + assert len(mock.request_history) == 1 + assert mock.request_history[0].method == "GET" + assert mock.request_history[0].url == "http://nearest:8108/keys/1" + assert response == json_response + + +def test_delete(fake_key: Key) -> None: + """Test that the Key object can delete a key.""" + json_response: ApiKeyDeleteSchema = {"id": 1} + with requests_mock.Mocker() as mock: + mock.delete( + "/keys/1", + json=json_response, + ) + + response = 
fake_key.delete() + + assert len(mock.request_history) == 1 + assert mock.request_history[0].method == "DELETE" + assert mock.request_history[0].url == "http://nearest:8108/keys/1" + assert response == json_response + + +def test_actual_retrieve( + actual_keys: Keys, + delete_all_keys: None, + delete_all: None, + create_key_id: int, +) -> None: + """Test that the Key object can retrieve a key from Typesense Server.""" + response = actual_keys[create_key_id].retrieve() + + assert_to_contain_object( + response, + { + "actions": ["documents:search"], + "collections": ["companies"], + "description": "Search-only key", + "id": create_key_id, + }, + ) + + +def test_actual_delete( + actual_keys: Keys, + delete_all_keys: None, + delete_all: None, + create_key_id: int, +) -> None: + """Test that the Key object can delete a key from Typesense Server.""" + response = actual_keys[create_key_id].delete() + + assert response == {"id": create_key_id} From b79ba63d6ecae93f53c14b471890fada5be9290b Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 15:59:21 +0300 Subject: [PATCH 119/288] refactor(test-utils): use typing module explicitly for object assertions - Enhance type hinting in the object assertions utility functions. - Improve compatibility with different Python versions. 
--- tests/utils/object_assertions.py | 36 +++++++++++++++++++++----------- 1 file changed, 24 insertions(+), 12 deletions(-) diff --git a/tests/utils/object_assertions.py b/tests/utils/object_assertions.py index 06ae183..a7af614 100644 --- a/tests/utils/object_assertions.py +++ b/tests/utils/object_assertions.py @@ -3,12 +3,20 @@ from __future__ import annotations import difflib -from typing import Any, Counter, Iterable, TypeVar +import sys -TObj = TypeVar("TObj", bound=object) +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing -def obj_to_dict(input_obj: TObj | dict[str, Any]) -> dict[str, Any]: +TObj = typing.TypeVar("TObj", bound=object) + + +def obj_to_dict( + input_obj: typing.Union[TObj, typing.Dict[str, typing.Any]], +) -> typing.Dict[str, typing.Any]: """ Convert an object to a dictionary. @@ -20,10 +28,13 @@ def obj_to_dict(input_obj: TObj | dict[str, Any]) -> dict[str, Any]: Returns: The object as a dictionary. """ - return input_obj if isinstance(input_obj, dict) else input_obj.__dict__ + return input_obj if isinstance(input_obj, typing.Dict) else input_obj.__dict__ -def assert_match_object(actual: TObj, expected: TObj | dict[str, Any]) -> None: +def assert_match_object( + actual: typing.Union[TObj, typing.Dict[str, typing.Any]], + expected: typing.Union[TObj, typing.Dict[str, typing.Any]], +) -> None: """ Assert that two objects have the same attribute values. 
@@ -46,7 +57,8 @@ def assert_match_object(actual: TObj, expected: TObj | dict[str, Any]) -> None: def assert_to_contain_object( - actual: TObj | dict[str, Any], expected: TObj | dict[str, Any] + actual: typing.Union[TObj, typing.Dict[str, typing.Any]], + expected: typing.Union[TObj, typing.Dict[str, typing.Any]], ) -> None: """Assert that two objects have the same attribute values.""" actual_attrs = obj_to_dict(actual) @@ -61,17 +73,17 @@ def assert_to_contain_object( def assert_object_lists_match( - actual: list[TObj], - expected: list[TObj | dict[str, Any]], + actual: typing.List[TObj], + expected: typing.List[typing.Union[TObj, typing.Dict[str, typing.Any]]], ) -> None: """Assert that two lists of objects have the same attribute values.""" actual_dicts = [obj_to_dict(actual_obj) for actual_obj in actual] expected_dicts = [obj_to_dict(expected_obj) for expected_obj in expected] - actual_counter = Counter( + actual_counter = typing.Counter( tuple(sorted(dict_entry.items())) for dict_entry in actual_dicts ) - expected_counter = Counter( + expected_counter = typing.Counter( tuple(sorted(dict_entry.items())) for dict_entry in expected_dicts ) if actual_counter != expected_counter: @@ -79,8 +91,8 @@ def assert_object_lists_match( def raise_with_diff( - expected_dicts: Iterable[dict[str, Any]], - actual_dicts: Iterable[dict[str, Any]], + expected_dicts: typing.Sequence[dict[str, typing.Any]], + actual_dicts: typing.Sequence[dict[str, typing.Any]], ) -> None: """ Raise an AssertionError with a unified diff of the expected and actual values. 
From 9b24a6e9e94e450ebaef254ff45e84a8b65217ed Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 16:00:11 +0300 Subject: [PATCH 120/288] fix(test-utils): fix check for input object type before accessing dict --- tests/utils/object_assertions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/utils/object_assertions.py b/tests/utils/object_assertions.py index a7af614..5312f61 100644 --- a/tests/utils/object_assertions.py +++ b/tests/utils/object_assertions.py @@ -45,7 +45,7 @@ def assert_match_object( Raises: AssertionError: If the objects do not have the same attribute values. """ - actual_attrs = actual.__dict__ + actual_attrs = obj_to_dict(actual) expected_attrs = obj_to_dict(expected) From 7fdecef87b604f17beeb8a14b62ec0fb19925b13 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 16:00:53 +0300 Subject: [PATCH 121/288] style(test-utils): format util functions based on black config --- tests/utils/object_assertions.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/utils/object_assertions.py b/tests/utils/object_assertions.py index 5312f61..9664f7f 100644 --- a/tests/utils/object_assertions.py +++ b/tests/utils/object_assertions.py @@ -106,9 +106,9 @@ def raise_with_diff( diff = difflib.unified_diff( expected_str, actual_str, - fromfile='expected', - tofile='actual', - lineterm='', + fromfile="expected", + tofile="actual", + lineterm="", ) - diff_output = '\n'.join(diff) + diff_output = "\n".join(diff) raise AssertionError(f"Lists do not contain the same elements:\n{diff_output}") From 453ce1eb55149bde83f8fc32f06f3e810de6147d Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 16:01:27 +0300 Subject: [PATCH 122/288] feat(test-utils): add key existence assertion for dicts - Implement `assert_to_contain_keys` function to check if specified keys exist in the actual dictionary. 
- Improve error messaging to provide clear feedback when expected keys are missing. --- tests/utils/object_assertions.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/utils/object_assertions.py b/tests/utils/object_assertions.py index 9664f7f..a74fb51 100644 --- a/tests/utils/object_assertions.py +++ b/tests/utils/object_assertions.py @@ -56,6 +56,15 @@ def assert_match_object( raise_with_diff([{key: expected_attrs[key]}], [{key: actual_attrs[key]}]) +def assert_to_contain_keys( + actual: typing.Dict[str, typing.Any], + keys: typing.List[str], +) -> None: + """Assert that the actual dictionary contains the expected keys.""" + for key in keys: + assert key in actual, f"Key {key} not found in actual dictionary" + + def assert_to_contain_object( actual: typing.Union[TObj, typing.Dict[str, typing.Any]], expected: typing.Union[TObj, typing.Dict[str, typing.Any]], From 14d5b1869411ee11527969f3b9e90047d4263e7c Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 16:47:43 +0300 Subject: [PATCH 123/288] feat(preprocess): add types for preprocess operations - Update the `stringify` function to improve compatibility with Python versions above 3.11. - Introduce type aliases for clearer type definitions of parameters and their string representations. 
--- src/typesense/preprocess.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/src/typesense/preprocess.py b/src/typesense/preprocess.py index 9e139e2..10a36ef 100644 --- a/src/typesense/preprocess.py +++ b/src/typesense/preprocess.py @@ -1,7 +1,30 @@ def stringify_search_params(params): return {key:stringify(val) for key, val in params.items()} +import sys def stringify(val): + +if sys.version_info > (3, 11): + import typing +else: + import typing_extensions as typing + + +_ListTypes = typing.List[typing.Union[str, int, bool]] + +_Types = typing.Union[int, str, bool] + +ParamSchema: typing.TypeAlias = typing.Dict[ + str, + typing.Union[ + _Types, + _ListTypes, + ], +] + + +StringifiedParamSchema: typing.TypeAlias = typing.Dict[str, str] + if isinstance(val, bool) or isinstance(val, int): return str(val).lower() else: From 9ece97104db139225cbf7d6e5f25f8ec05ad7fa6 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 16:49:30 +0300 Subject: [PATCH 124/288] feat(preprocess): validate stringify function - Add validation to the `stringify` function to ensure input values are of type string, integer, or boolean, raising an `InvalidParameter` exception for invalid types. 
--- src/typesense/preprocess.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/src/typesense/preprocess.py b/src/typesense/preprocess.py index 10a36ef..bb730ac 100644 --- a/src/typesense/preprocess.py +++ b/src/typesense/preprocess.py @@ -1,8 +1,6 @@ -def stringify_search_params(params): - return {key:stringify(val) for key, val in params.items()} import sys -def stringify(val): +from typesense.exceptions import InvalidParameter if sys.version_info > (3, 11): import typing @@ -25,7 +23,13 @@ def stringify(val): StringifiedParamSchema: typing.TypeAlias = typing.Dict[str, str] + +def stringify(val: _Types) -> str: + if not isinstance(val, (str, int, bool)): + raise InvalidParameter(f"Value {val} is not a string, integer, or boolean.") if isinstance(val, bool) or isinstance(val, int): return str(val).lower() else: - return val \ No newline at end of file + return val + + From 485668682551e08c73a4d5af77ca64632b9c571d Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 16:51:11 +0300 Subject: [PATCH 125/288] feat(preprocess): add parameter processing and stringification functions - Implement `process_param_list` to concatenate a list of parameters into a string. - Enhance `stringify_search_params` to handle lists of parameters. - Ensure all values are validated and converted to strings. - Raise an `InvalidParameter` exception for invalid types. --- src/typesense/preprocess.py | 65 +++++++++++++++++++++++++++++++++++++ 1 file changed, 65 insertions(+) diff --git a/src/typesense/preprocess.py b/src/typesense/preprocess.py index bb730ac..2a483f3 100644 --- a/src/typesense/preprocess.py +++ b/src/typesense/preprocess.py @@ -33,3 +33,68 @@ def stringify(val: _Types) -> str: return val +def process_param_list( + parammeter_list: typing.List[typing.Union[str, bool, int]], +) -> str: + """ + Concatenate a list of parameters into a string. + + Args: + parammeter_list (typing.List[str | int | bool]): The list of parameters. 
+ + Returns: + str: The concatenated parameters + + Raises: + InvalidParameter: If the value is not a string, integer, or boolean. + + Examples: + >>> process_param_list(["a", "b", "c"]) + "a,b,c" + >>> process_param_list([1, 2, 3]) + "1,2,3" + >>> process_param_list([True, False, True]) + "true,false,true" + >>> process_param_list([True, 1, "c"]) + "true,1,c" + """ + stringified_list = [ + stringify(parameter_element) for parameter_element in parammeter_list + ] + return ",".join(stringified_list) + + +def stringify_search_params(parameter_dict: ParamSchema) -> StringifiedParamSchema: + """ + Convert the search parameters to strings. + + Args: + parameter_dict (ParamSchema): The search parameters. + + Returns: + StringifiedParamSchema: The search parameters as strings. + + Raises: + InvalidParameter: If a value is not a string, integer, or boolean. + + Examples: + >>> stringify_search_params({"a": 1, "b": "c", "d": True}) + {"a": "1", "b": "c", "d": "true"} + >>> stringify_search_params({"a": [1, 2, 3], "b": ["c", "d", "e"]}) + {"a": "1,2,3", "b": "c,d,e"} + >>> stringify_search_params({"a": [True, False, True], "b": [1, 2, 3]}) + {"a": "true,false,true", "b": "1,2,3"} + """ + stringified_params = {} + + for key, param_value in parameter_dict.items(): + if isinstance(param_value, list): + stringified_params[key] = process_param_list(param_value) + elif isinstance(param_value, (bool, int, str)): + stringified_params[key] = stringify(param_value) + else: + raise InvalidParameter( + f"Value {param_value} is not a string, integer, or boolean", + ) + + return stringified_params From f9cd595299688fe4bfc3cb05dc982d1e8d1c7e9c Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 30 Aug 2024 15:33:44 +0300 Subject: [PATCH 126/288] test(preprocess): add tests for preprocess module Add unit tests for the `preprocess` module - Introduce tests for `stringify`, `process_param_list`, and `stringify_search_params` functions. 
- Ensure the tests cover various data types such as strings, booleans, integers, floats, lists, and dictionaries. - Validate that invalid inputs raise the appropriate`InvalidParameter` exceptions. --- tests/preprocess_test.py | 83 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 83 insertions(+) create mode 100644 tests/preprocess_test.py diff --git a/tests/preprocess_test.py b/tests/preprocess_test.py new file mode 100644 index 0000000..4e081ad --- /dev/null +++ b/tests/preprocess_test.py @@ -0,0 +1,83 @@ +"""Tests for the preprocess module.""" + +import pytest + +from typesense import exceptions +from typesense.preprocess import ( + ParamSchema, + process_param_list, + stringify, + stringify_search_params, +) + + +def test_stringify_str() -> None: + """Test that the function can stringify a string.""" + assert stringify("string") == "string" + + +def test_stringify_bool() -> None: + """Test that the function can stringify a boolean.""" + assert stringify(True) == "true" + + +def test_stringify_int() -> None: + """Test that the function can stringify an integer.""" + assert stringify(42) == "42" + + +def test_stringify_float() -> None: + """Test that the function can stringify a float.""" + with pytest.raises(exceptions.InvalidParameter): + stringify(3.15) + + +def test_stringify_list() -> None: + """Test that the function can stringify a list.""" + with pytest.raises(exceptions.InvalidParameter): + stringify([1, 2, 3]) + + +def test_concat_string_list() -> None: + """Test that the function can concatenate a list of strings.""" + assert process_param_list(["a", "b", "c"]) == "a,b,c" + + +def test_concat_bool_list() -> None: + """Test that the function can concatenate a list of booleans.""" + assert process_param_list([True, False, True]) == "true,false,true" + + +def test_concat_int_list() -> None: + """Test that the function can concatenate a list of integers.""" + assert process_param_list([1, 2, 3]) == "1,2,3" + + +def test_concat_list_list() -> None: + 
"""Test that the function can concatenate a list of lists.""" + with pytest.raises(exceptions.InvalidParameter): + process_param_list([[1, 2], [3, 4], [5, 6]]) + + +def test_concat_params() -> None: + """Test that the function can concatenate a dictionary of parameters.""" + test_params: ParamSchema = { + "one": "one", + "two": 2, + "three": True, + "four": [1, 2, 3], + "five": ["one", "two", "three"], + "six": [True, False], + "seven": ["one", 2, True], + } + + processed_params = stringify_search_params(test_params) + assert processed_params == { + "one": "one", + "two": "2", + "three": "true", + "four": "1,2,3", + "five": "one,two,three", + "six": "true,false", + "seven": "one,2,true", + } From f8709d06874c38336ff2667d59351fe17bd942c8 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 16:54:34 +0300 Subject: [PATCH 127/288] feat(documents): add types for documents class - Introduced new types and classes to enhance document operations in Typesense. - Added `DirtyValuesParameters` and `DocumentWriteParameters` to manage document writes and dirty value handling effectively. - Implemented `DocumentImportParameters` to support various import scenarios, including returning IDs and documents upon import. - Defined `DocumentExportParameters` for exporting documents with flexible field inclusion/exclusion. - Created a robust set of search and query parameters, including `RequiredSearchParameters`, `QueryParameters`, and various filtering, ranking, sorting, and pagination options. - Enhanced type safety and clarity by leveraging Python's `typing` module, ensuring better code maintainability and usability. - Established a clear schema for expected document structures, facilitating consistent API interactions. 
--- src/typesense/types/document.py | 851 ++++++++++++++++++++++++++++++++ 1 file changed, 851 insertions(+) create mode 100644 src/typesense/types/document.py diff --git a/src/typesense/types/document.py b/src/typesense/types/document.py new file mode 100644 index 0000000..2bebc85 --- /dev/null +++ b/src/typesense/types/document.py @@ -0,0 +1,851 @@ +"""Types for document operations in Typesense.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +_InfixOperations = typing.Literal["off", "always", "fallback"] +""" +Infix operations for search queries. + +- `off`: infix search is disabled, which is default. +- `always`: infix search is performed along with regular search. +- `fallback`: infix search is performed if regular search does not produce results. +""" + +_SequenceTypes = typing.Union[ + typing.Sequence[int], + typing.Sequence[str], + typing.Sequence[float], +] + +_Types = typing.Union[int, str, float, bool] + +DocumentSchema: typing.TypeAlias = typing.Mapping[ + str, + typing.Union[ + _Types, + _SequenceTypes, + "DocumentSchema", + typing.Sequence["DocumentSchema"], + ], +] +""" +Valid types for a document schema. + +It can be a mapping of a string to any of the following types: + +- `int` +- `str` +- `float` +- `bool` + +Their respective sequences, or a nested schema of the same type. +""" + +TDoc = typing.TypeVar("TDoc", bound="DocumentSchema") + + +class DirtyValuesParameters(typing.TypedDict): + """ + Parameters for handling dirty values in documents. + + - `coerce_or_reject`: Attempt coercion of the field's value to previously inferred type. + If coercion fails, reject the write outright with an error message. + - `coerce_or_drop`: Attempt coercion of the field's value to previously inferred type. + If coercion fails, drop the particular field and index the rest of the document. + - `drop`: Drop the particular field and index the rest of the document. 
+ - `reject`: Reject the write outright with an error message. + """ + + dirty_values: typing.NotRequired[ + typing.Literal["coerce_or_reject", "coerce_or_drop", "drop", "reject"] + ] + + +class DocumentWriteParameters(DirtyValuesParameters): + """ + Parameters for writing documents. + + Attributes: + action (str): [Action](https://typesense.org/docs/26.0/api/documents.html#action-modes-create-upsert-update-emplace) to perform on the document. + + - `create`: Creates a new document. Fails if a document with the same id + already exists (default). + - `upsert`: Creates a new document or updates an existing document if a + document with the same id already exists. Requires the whole document to be sent. + For partial updates, use the update action below. + - `update`: Updates an existing document. Fails if a document with the + given id does not exist. You can send a partial document containing only the + fields that are to be updated. + - `emplace`: Creates a new document or updates an existing document if a + document with the same id already exists. You can send either the whole document + or a partial document for update. + + dirty_values (str): [Handling of dirty values](https://typesense.org/docs/26.0/api/documents.html#dealing-with-dirty-data) in the document. + + - `coerce_or_reject`: Attempt coercion of the field's value to previously inferred type. + If coercion fails, reject the write outright with an error message. + - `coerce_or_drop`: Attempt coercion of the field's value to previously inferred type. + If coercion fails, drop the particular field and index the rest of the document. + - `drop`: Drop the particular field and index the rest of the document. + - `reject`: Reject the write outright with an error message. 
+ """ + + action: typing.NotRequired[typing.Literal["create", "update", "upsert", "emplace"]] + + +class UpdateByFilterParameters(typing.TypedDict): + """ + Parameters for updating documents by filter. + + Attributes: + filter_by(str): Filter to apply to documents. + """ + + filter_by: str + + +class UpdateByFilterResponse(typing.TypedDict): + """ + Response from updating documents by filter. + + Attributes: + num_updated(int): Indicates the success of the operation. + """ + + num_updated: int + + +class ImportResponseSuccess(typing.TypedDict): + """ + Response for a successful import operation. + + Attributes: + success(True): Indicates the success of the operation. + """ + + success: typing.Literal[True] + + +class ImportResponseWithDocAndId(typing.Generic[TDoc], ImportResponseSuccess): + """ + Response for a successful import operation with document and id. + + Attributes: + success(True): Indicates the success of the operation. + doc(TDoc): Imported document. + id(str): ID of the imported document. + """ + + id: str + doc: TDoc + + +class ImportResponseWithDoc(typing.Generic[TDoc], ImportResponseSuccess): + """ + Response for a successful import operation with document. + + Attributes: + success(True): Indicates the success of the operation. + doc(TDoc): Imported document. + """ + + doc: TDoc + + +class ImportResponseWithId(ImportResponseSuccess): + """ + Response for a successful import operation with ID. + + Attributes: + success(True): Indicates the success of the operation. + id(str): ID of the imported document. + """ + + id: str + + +class ImportResponseFail(typing.Generic[TDoc], typing.TypedDict): + """ + Response for a failed import operation. + + Attributes: + success (False): Indicates the success of the operation. + error (str): Error message. + code (int): Error code. + document (TDoc): Document that failed to import. 
+ """ + + success: typing.Literal[False] + error: str + code: int + document: TDoc + + +ImportResponse: typing.TypeAlias = typing.Union[ + typing.List[typing.Union[ImportResponseWithDoc[TDoc], ImportResponseFail[TDoc]]], + typing.List[typing.Union[ImportResponseWithId, ImportResponseFail[TDoc]]], + typing.List[ + typing.Union[ImportResponseWithDocAndId[TDoc], ImportResponseFail[TDoc]] + ], + typing.List[typing.Union[ImportResponseSuccess, ImportResponseFail[TDoc]]], +] +"""Set of all possible responses after an import operation.""" + + +class DocumentImportParametersReturnId(DocumentWriteParameters): + """ + Parameters for importing documents with return ID. + + Attributes: + return_id (True): Return the ID of the imported document. + action (str): [Action](https://typesense.org/docs/26.0/api/documents.html#action-modes-create-upsert-update-emplace) to perform on the document. + + - `create`: Creates a new document. Fails if a document with the same id + already exists (default). + - `upsert`: Creates a new document or updates an existing document if a + document with the same id already exists. Requires the whole document to be sent. + For partial updates, use the update action below. + - `update`: Updates an existing document. Fails if a document with the + given id does not exist. You can send a partial document containing only the + fields that are to be updated. + - `emplace`: Creates a new document or updates an existing document if a + document with the same id already exists. You can send either the whole document + or a partial document for update. + + dirty_values (str): [Handling of dirty values](https://typesense.org/docs/26.0/api/documents.html#dealing-with-dirty-data) in the document. + + - `coerce_or_reject`: Attempt coercion of the field's value to previously inferred type. + If coercion fails, reject the write outright with an error message. 
+ - `coerce_or_drop`: Attempt coercion of the field's value to previously inferred type. + If coercion fails, drop the particular field and index the rest of the document. + - `drop`: Drop the particular field and index the rest of the document. + - `reject`: Reject the write outright with an error message. + """ + + return_id: typing.Literal[True] + + +class DocumentImportParametersReturnDoc(DocumentWriteParameters): + """ + Parameters for importing documents with return document. + + Attributes: + return_doc (True): Return the imported document. + action (str): [Action](https://typesense.org/docs/26.0/api/documents.html#action-modes-create-upsert-update-emplace) to perform on the document. + + - `create`: Creates a new document. Fails if a document with the same id + already exists (default). + - `upsert`: Creates a new document or updates an existing document if a + document with the same id already exists. Requires the whole document to be sent. + For partial updates, use the update action below. + - `update`: Updates an existing document. Fails if a document with the + given id does not exist. You can send a partial document containing only the + fields that are to be updated. + - `emplace`: Creates a new document or updates an existing document if a + document with the same id already exists. You can send either the whole document + or a partial document for update. + + dirty_values (str): [Handling of dirty values](https://typesense.org/docs/26.0/api/documents.html#dealing-with-dirty-data) in the document. + + - `coerce_or_reject`: Attempt coercion of the field's value to previously inferred type. + If coercion fails, reject the write outright with an error message. + - `coerce_or_drop`: Attempt coercion of the field's value to previously inferred type. + If coercion fails, drop the particular field and index the rest of the document. 
+ - `drop`: Drop the particular field and index the rest of the document. + - `reject`: Reject the write outright with an error message. + """ + + return_doc: typing.Literal[True] + + +class DocumentImportParametersReturnDocAndId(DocumentWriteParameters): + """ + Parameters for importing documents with return document and ID. + + Attributes: + return_doc (True): Return the imported document. + return_id (True): Return the ID of the imported document. + action (str): [Action](https://typesense.org/docs/26.0/api/documents.html#action-modes-create-upsert-update-emplace) to perform on the document. + + - `create`: Creates a new document. Fails if a document with the same id + already exists (default). + - `upsert`: Creates a new document or updates an existing document if a + document with the same id already exists. Requires the whole document to be sent. + For partial updates, use the update action below. + - `update`: Updates an existing document. Fails if a document with the + given id does not exist. You can send a partial document containing only the + fields that are to be updated. + - `emplace`: Creates a new document or updates an existing document if a + document with the same id already exists. You can send either the whole document + or a partial document for update. + + dirty_values (str): [Handling of dirty values](https://typesense.org/docs/26.0/api/documents.html#dealing-with-dirty-data) in the document. + + - `coerce_or_reject`: Attempt coercion of the field's value to previously inferred type. + If coercion fails, reject the write outright with an error message. + - `coerce_or_drop`: Attempt coercion of the field's value to previously inferred type. + If coercion fails, drop the particular field and index the rest of the document. + - `drop`: Drop the particular field and index the rest of the document. + - `reject`: Reject the write outright with an error message. 
+ """ + + return_doc: typing.Literal[True] + return_id: typing.Literal[True] + + +DocumentImportParameters: typing.TypeAlias = typing.Union[ + DocumentImportParametersReturnId, + DocumentImportParametersReturnDoc, + DocumentImportParametersReturnDocAndId, +] +"""Set of all possible parameters for importing documents.""" + + +class DocumentExportParameters(typing.TypedDict): + """ + Parameters for [exporting documents](https://typesense.org/docs/26.0/api/documents.html#export-documents). + + Attributes: + filter_by (str): Filter to apply to documents. + include_fields (str): Fields to include in the exported documents. + exclude_fields (str): Fields to exclude from the exported documents. + """ + + filter_by: typing.NotRequired[str] + include_fields: typing.NotRequired[str] + exclude_fields: typing.NotRequired[str] + + +class RequiredSearchParameters(typing.TypedDict): + """ + Required parameters for searching documents. + + Attributes: + q (str): Query string to search for. + query_by (str): Field to search in. + """ + + q: str + query_by: str + + +class QueryParameters(typing.TypedDict): + """ + Parameters [regarding queries](https://typesense.org/docs/26.0/api/search.html#query-parameters). + + Attributes: + prefix (str, bool, list[bool]): Prefix operations for search queries. + infix (InfixOperations, list[InfixOperations]): Infix operations for search queries. + pre_segmented_query (bool): Indicates whether the query is pre-segmented. + preset (str): Preset for search queries. + vector_query (str): Vector query for search. + voice_query (str): Voice query for search. 
+ """ + + prefix: typing.NotRequired[typing.Union[str, bool, typing.List[bool]]] + infix: typing.NotRequired[ + typing.Union[_InfixOperations, typing.List[_InfixOperations]] + ] + pre_segmented_query: typing.NotRequired[bool] + preset: typing.NotRequired[str] + vector_query: typing.NotRequired[str] + voice_query: typing.NotRequired[str] + + +class FilterParameters(typing.TypedDict): + """ + Parameters regarding [filtering search responses](https://typesense.org/docs/26.0/api/search.html#filter-parameters). + + Attributes: + filter_by (str): Filter to apply to search results. + enable_lazy_filter (bool): Enable lazy filtering. + """ + + filter_by: typing.NotRequired[str] + enable_lazy_filter: typing.NotRequired[bool] + + +class RankingAndSortingParameters(typing.TypedDict): + """ + Parameters regarding [ranking and sorting search results](https://typesense.org/docs/26.0/api/search.html#ranking-and-sorting-parameters). + + Attributes: + query_by_weights (str, list[int]): Weights to apply to query fields. + text_match_type (str): Type of text match to prioritize. + + - `max_score`: Prioritize maximum score. + - `max_weight`: Prioritize maximum weight. + + sort_by (str, list[str]): Fields to sort search results by in order specified. + prioritize_exact_match (bool): Prioritize exact matches. + prioritize_token_position (bool): Prioritize token position. + prioritize_num_matching_fields (bool): Prioritize number of matching fields. + pinned_hits (dict[str, list[str]]): Pinned hits to prioritize. + hidden_hits (dict[str, list[str]]): Hidden hits to deprioritize. + enable_overrides (bool): Enable overrides. + override_tags (str, list[str]): Tags to override. + max_candidates (int): Maximum number of candidates to return. 
+ """ + + query_by_weights: typing.NotRequired[typing.Union[str, typing.List[int]]] + text_match_type: typing.NotRequired[typing.Literal["max_score", "max_weight"]] + sort_by: typing.NotRequired[typing.Union[str, typing.List[str]]] + prioritize_exact_match: typing.NotRequired[bool] + prioritize_token_position: typing.NotRequired[bool] + prioritize_num_matching_fields: typing.NotRequired[bool] + pinned_hits: typing.NotRequired[typing.Dict[str, typing.List[str]]] + hidden_hits: typing.NotRequired[typing.Dict[str, typing.List[str]]] + enable_overrides: typing.NotRequired[bool] + override_tags: typing.NotRequired[typing.Union[str, typing.List[str]]] + max_candidates: typing.NotRequired[int] + + +class PaginationParameters(typing.TypedDict): + """ + Parameters regarding [pagination of search results](https://typesense.org/docs/26.0/api/search.html#pagination-parameters). + + Attributes: + page (int): Page number to retrieve. + per_page (int): Number of results per page. + offset (int): Offset to start retrieving results from. + limit (int): Limit of results to retrieve. + """ + + page: typing.NotRequired[int] + per_page: typing.NotRequired[int] + offset: typing.NotRequired[int] + limit: typing.NotRequired[int] + + +class FacetingParameters(typing.TypedDict): + """ + Parameters regarding [faceting search results](https://typesense.org/docs/26.0/api/search.html#faceting-parameters). + + Attributes: + facet_by (str, list[str]): Field to facet by. + max_facet_values (int): Maximum number of facet values to return. + facet_query (str): Query to facet by. + facet_query_num_typos (int): Number of typos to allow in facet query. + facet_return_parent (str): Return parent of facet. + facet_sample_percent (int): Sample percentage of facet values to return. + facet_sample_threshold (int): Sample threshold of facet values to return. 
+ """ + + facet_by: typing.NotRequired[typing.Union[str, typing.List[str]]] + max_facet_values: typing.NotRequired[int] + facet_query: typing.NotRequired[str] + facet_query_num_typos: typing.NotRequired[int] + facet_return_parent: typing.NotRequired[str] + facet_sample_percent: typing.NotRequired[int] + facet_sample_threshold: typing.NotRequired[int] + + +class GroupingParameters(typing.TypedDict): + """ + Parameters regarding [grouping search results](https://typesense.org/docs/26.0/api/search.html#grouping-parameters). + + Attributes: + group_by (str): Field to group by. + group_limit (int): Limit of groups to return. + group_missing_values (bool): Include missing values in groups. + """ + + group_by: typing.NotRequired[str] + group_limit: typing.NotRequired[int] + group_missing_values: typing.NotRequired[bool] + + +class ResultsParameters(typing.TypedDict): + """ + Parameters regarding [search results](https://typesense.org/docs/26.0/api/search.html#results-parameters). + + Attributes: + include_fields (str, list[str]): Fields to include in search results. + exclude_fields (str, list[str]): Fields to exclude from search results. + highlight_fields (str, list[str]): Fields to highlight in search results. + highlight_full_fields (str, list[str]): Fields to highlight fully in search results. + highlight_affix_num_tokens (int): The number of tokens that should surround the highlighted text on each side. + highlight_start_tag (str): Start tag for highlighting. + highlight_end_tag (str): End tag for highlighting. + enable_highlight_v1 (bool): Flag for disabling the deprecated, old highlight structure in the response. + snippet_threshold (int): Field values under this length will be fully highlighted, instead of showing a snippet of relevant portion. + limit_hits (int): Limit the number of hits to return. + search_cutoff_ms (int): Search cutoff time in milliseconds. 
+ exhaustive_search (bool): Perform exhaustive search. + """ + + include_fields: typing.NotRequired[typing.Union[str, typing.List[str]]] + exclude_fields: typing.NotRequired[typing.Union[str, typing.List[str]]] + highlight_fields: typing.NotRequired[ + typing.Union[typing.Literal["none"], str, typing.List[str]] + ] + highlight_full_fields: typing.NotRequired[ + typing.Union[typing.Literal["none"], str, typing.List[str]] + ] + highlight_affix_num_tokens: typing.NotRequired[int] + highlight_start_tag: typing.NotRequired[str] + highlight_end_tag: typing.NotRequired[str] + enable_highlight_v1: typing.NotRequired[bool] + snippet_threshold: typing.NotRequired[int] + limit_hits: typing.NotRequired[int] + search_cutoff_ms: typing.NotRequired[int] + exhaustive_search: typing.NotRequired[bool] + + +class TypoToleranceParameters(typing.TypedDict): + """ + Parameters regarding [typo tolerance in search results](https://typesense.org/docs/26.0/api/search.html#typo-tolerance-parameters). + + Attributes: + num_typos (int): Number of typos to allow in search results. + min_len_1typo (int): Minimum length of query to allow one typo. + min_len_2typo (int): Minimum length of query to allow two typos. + split_join_tokens (str): Treat space as a typo. + typo_tokens_threshold (int): Threshold for typo tokens. + drop_tokens_threshold (int): Threshold for dropping tokens. + drop_tokens_mode (str): Mode for dropping tokens. + + + - `right_to_left`: Drop tokens from right to left (default). + - `left_to_right`: Drop tokens from left to right. + - `both_sides:3`: Drop tokens from both sides with a threshold of 3. + Afterwards, drops back to the default right to left. 
+ """ + + num_typos: typing.NotRequired[int] + min_len_1typo: typing.NotRequired[int] + min_len_2typo: typing.NotRequired[int] + split_join_tokens: typing.NotRequired[typing.Literal["off", "fallback", "always"]] + typo_tokens_threshold: typing.NotRequired[int] + drop_tokens_threshold: typing.NotRequired[int] + drop_tokens_mode: typing.NotRequired[ + typing.Literal["right_to_left", "left_to_right", "both_sides:3"] + ] + enable_typos_for_numerical_tokens: typing.NotRequired[bool] + + +class CachingParameters(typing.TypedDict): + """ + Parameters regarding [caching search results](https://typesense.org/docs/26.0/api/search.html#caching-parameters). + + Attributes: + use_cache (bool): Use cache for search results. + cache_ttl (int): The duration (in seconds) that determines how long the search query is cached. + """ + + use_cache: typing.NotRequired[bool] + cache_ttl: typing.NotRequired[int] + + +class SearchParameters( + RequiredSearchParameters, + QueryParameters, + FilterParameters, + RankingAndSortingParameters, + PaginationParameters, + FacetingParameters, + GroupingParameters, + ResultsParameters, + TypoToleranceParameters, + CachingParameters, +): + """Parameters for searching documents.""" + + +class MultiSearchParameters(SearchParameters): + """ + Parameters for performing a [Federated/Multi-Search](https://typesense.org/docs/26.0/api/federated-multi-search.html#federated-multi-search). + + Attributes: + collection (str): Collection to search in. + + Plus all the parameters from `SearchParameters`. 
+ """ + + collection: str + + +class MultiSearchCommonParameters( + QueryParameters, + FilterParameters, + RankingAndSortingParameters, + PaginationParameters, + FacetingParameters, + GroupingParameters, + ResultsParameters, + TypoToleranceParameters, + CachingParameters, +): + """ + [Query parameters](https://typesense.org/docs/26.0/api/federated-multi-search.html#multi-search-parameters) for multi-search. + + Attributes: + query_by (str): Field to search in. + limit_multi_searches (int): Limit the number of multi-searches. + x_typesense_api_key (str): API key for Typesense. + + You can also use any of the parameters from `SearchParameters`. + """ + + query_by: typing.NotRequired[str] + limit_multi_searches: typing.NotRequired[int] + x_typesense_api_key: typing.NotRequired[str] + + +class GenerateScopedSearchKeyParams( + QueryParameters, + FilterParameters, + RankingAndSortingParameters, + PaginationParameters, + FacetingParameters, + GroupingParameters, + ResultsParameters, + TypoToleranceParameters, + CachingParameters, +): + """ + Parameters for generating a [scoped search key](https://typesense.org/docs/26.0/api/api-keys.html#generate-scoped-search-key). + + Attributes: + q (str): Query string to search for. + query_by (str): Field to search in. + filter_by (str): Filter to apply to search results. + expires_at (int): Expiry time (in UNIX timestamp format) for the scoped search key. + limit_multi_searches (int): Limit the number of multi-searches. + + You can also embed any of the parameters from `SearchParameters`. + """ + + q: typing.NotRequired[str] + query_by: typing.NotRequired[str] + expires_at: typing.NotRequired[int] + limit_multi_searches: typing.NotRequired[int] + + +class FacetCountSchema(typing.TypedDict): + """ + Schema for facet count. + + Attributes: + count (int): Number of occurrences of the facet value. + value (str): Value of the facet. 
+ highlighted (str): Highlighted value of the facet. + """ + + count: int + value: str + highlighted: str + + +class FacetCountStats(typing.TypedDict): + """ + Statistics for facet count. + + Attributes: + min (float): Minimum value of the facet. + max (float): Maximum value of the facet. + avg (float): Average value of the facet. + sum (float): Sum of the facet values. + total_values (int): Total number of values. + """ + + min: typing.NotRequired[float] + max: typing.NotRequired[float] + avg: typing.NotRequired[float] + sum: typing.NotRequired[float] + total_values: typing.NotRequired[int] + + +class SearchResponseFacetCountSchema(typing.TypedDict): + """ + Schema for the search response facet count. + + Attributes: + counts (list[FacetCountSchema]): List of facet counts. + field_name (str): Name of the field. + stats (FacetCountStats): Statistics for the facet count. + """ + + counts: typing.List[FacetCountSchema] + field_name: str + stats: FacetCountStats + + +class Highlight(typing.TypedDict): + """ + Schema for highlighting search results. + + Attributes: + matched_tokens (list[str]): List of matched tokens. + snippet (str): Snippet of the matched tokens. + value (str): Value of the matched tokens. + """ + + matched_tokens: typing.List[str] + snippet: str + value: str + + +class HighlightExtended(Highlight): + """ + Extended schema for highlighting search results. + + Attributes: + field (str): Field to highlight. + + Plus all the parameters from `Highlight`. + """ + + field: str + + +class TextMatchInfo(typing.TypedDict): + """ + Schema for text match information. + + Attributes: + best_field_score (str): Best field score. + best_field_weight (int): Best field weight. + fields_matched (int): Number of fields matched. + score (str): Score of the text match. + typo_prefix_score (int): Typo prefix score. + num_tokens_dropped (int): Number of tokens dropped. + tokens_matched (int): Number of tokens matched. 
+ """ + + best_field_score: str + best_field_weight: int + fields_matched: int + score: str + typo_prefix_score: int + num_tokens_dropped: int + tokens_matched: int + + +class Hit(typing.Generic[TDoc], typing.TypedDict): + """ + Schema for a hit in search results. + + Attributes: + document (TDoc): Document in the hit. + highlights (list[HighlightExtended]): List of highlights in the hit. + highlight (dict[str, Highlight]): Dictionary of highlights in the hit. + text_match (int): Text match in the hit. + text_match_info (TextMatchInfo): Text match information in the hit. + """ + + document: TDoc + highlights: typing.List[HighlightExtended] + highlight: typing.Dict[str, Highlight] + text_match: int + text_match_info: TextMatchInfo + + +class GroupedHit(typing.Generic[TDoc], typing.TypedDict): + """ + Schema for grouped hits in search results. + + Attributes: + group_key (list[str]): List of group keys. + hits (list[Hit[TDoc]]): List of hits in the group. + found (int): Number of hits found. + """ + + group_key: typing.List[str] + hits: typing.List[Hit[TDoc]] + found: typing.NotRequired[int] + + +class ConversationHistory(typing.TypedDict): + """ + Schema for a conversation's history in the search results. + + Attributes: + conversation (list[object]): List of conversation objects. + id (str): ID of the conversation. + last_updated (int): Last updated time of the conversation. + ttl (int): Time to live of the conversation. + """ + + conversation: typing.List[object] + id: str + last_updated: int + ttl: int + + +class Conversation(typing.TypedDict): + """ + Schema for a conversation in the search results. + + Attributes: + answer (str): Answer to the query. + conversation_history (ConversationHistory): Conversation history. + conversation_id (str): ID of the conversation. + query (str): Query of the conversation. 
+ """ + + answer: str + conversation_history: ConversationHistory + conversation_id: str + query: str + + +class SearchResponse(typing.Generic[TDoc], typing.TypedDict): + """ + Schema for a search response. + + Attributes: + facet_counts (list[SearchResponseFacetCountSchema]): List of facet counts. + found (int): Number of documents found. + found_docs (int): Number of documents found. + page (int): Page number of the search results. + out_of (int): Number of documents found out of the whole dataset. + search_time_ms (int): Search time in milliseconds. + search_cutoff (bool): Search cutoff. + hits (list[Hit[TDoc]]): List of hits in the search results. + grouped_hits (list[GroupedHit[TDoc]]): List of grouped hits in the search results. + conversation (Conversation): Conversation in the search results. + """ + + facet_counts: typing.List[SearchResponseFacetCountSchema] + found: int + found_docs: typing.NotRequired[int] + page: int + out_of: int + search_time_ms: int + search_cutoff: typing.NotRequired[bool] + hits: typing.List[Hit[TDoc]] + grouped_hits: typing.NotRequired[typing.List[GroupedHit[TDoc]]] + conversation: typing.NotRequired[Conversation] + + +class DeleteQueryParameters(typing.TypedDict): + """ + Parameters for deleting documents. + + Attributes: + filter_by (str): Filter to apply to documents. + batch_size (int): Batch size for deleting documents. + ignore_not_found (bool): Ignore not found documents. + """ + + filter_by: str + batch_size: typing.NotRequired[int] + ignore_not_found: typing.NotRequired[bool] + + +class DeleteResponse(typing.TypedDict): + """ + Response from deleting documents. + + Attributes: + num_deleted (int): Number of documents deleted. 
+ """ + + num_deleted: int From 9d9ec6663c1820de0b8ccfe2331c7fc0139b51f2 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 17:00:20 +0300 Subject: [PATCH 128/288] chore: add faker package --- requirements.txt | 3 +++ 1 file changed, 3 insertions(+) diff --git a/requirements.txt b/requirements.txt index 8595ce9..d06fed1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,6 +2,7 @@ certifi==2024.7.4 chardet==3.0.4 charset-normalizer==3.3.2 idna==2.8 +Faker==27.4.0 iniconfig==2.0.0 isort==5.13.2 mypy==1.11.0 @@ -9,6 +10,8 @@ mypy-extensions==1.0.0 packaging==24.1 pluggy==1.5.0 pytest==8.3.2 +python-dateutil==2.9.0.post0 requests==2.32.3 +six==1.16.0 typing_extensions==4.12.2 urllib3==2.2.2 From cd27dd185be4532c5a8cdfffba6efbef1d81a98c Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 23 Aug 2024 17:04:18 +0300 Subject: [PATCH 129/288] feat(test-utils): add fixtures for documents - Introduce `create_document` fixture to create document objects in the Typesense server for testing. - Implement `generate_companies` fixture to generate a list of fake company data for enhanced testing coverage. 
--- tests/conftest.py | 62 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 62 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index 74d5283..989aab5 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,6 +2,8 @@ import pytest import requests +from faker import Faker +from faker.providers import company from typesense.alias import Alias from typesense.aliases import Aliases @@ -11,6 +13,8 @@ from typesense.collection import Collection from typesense.collections import Collections from typesense.configuration import Configuration +from typesense.document import Document +from typesense.documents import Documents from typesense.key import Key from typesense.keys import Keys from typesense.operations import Operations @@ -23,6 +27,9 @@ pytest.register_assert_rewrite("utils.object_assertions") +fake = Faker() +fake.add_provider(company) + @pytest.fixture(scope="function", name="delete_all") def clear_typesense_collections() -> None: @@ -64,6 +71,21 @@ def create_collection_fixture() -> None: } response = requests.post(url, headers=headers, json=data) +@pytest.fixture(scope="function", name="create_document") +def create_document_fixture() -> None: + """Create a document in the Typesense server.""" + url = "http://localhost:8108/collections/companies/documents" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + document_data = { + "id": "0", + "company_name": "Company", + "num_employees": 10, + } + + response = requests.post(url, headers=headers, json=document_data, timeout=3) + response.raise_for_status() + + @pytest.fixture(scope="function", name="create_stopword") def create_stopword_fixture() -> None: """Create a stopword set in the Typesense server.""" @@ -305,6 +327,12 @@ def actual_collections_fixture(actual_api_call: ApiCall) -> Collections: return Collections(actual_api_call) +@pytest.fixture(scope="function", name="actual_documents") +def actual_documents_fixture(actual_api_call: 
ApiCall) -> Documents: + """Return a Documents object using a real API.""" + return Documents(actual_api_call, "companies") + + @pytest.fixture(scope="function", name="actual_overrides") def actual_overrides_fixture(actual_api_call: ApiCall) -> Overrides: return Overrides(actual_api_call, "companies") @@ -483,3 +511,37 @@ def fake_key_fixture(fake_api_call: ApiCall) -> Key: return Key(fake_api_call, 1) +@pytest.fixture(scope="function", name="fake_documents") +def fake_documents_fixture(fake_api_call: ApiCall) -> Documents: + """Return a Documents object with test values.""" + return Documents(fake_api_call, "companies") + + +@pytest.fixture(scope="function", name="fake_document") +def fake_document_fixture(fake_api_call: ApiCall) -> Document: + """Return a Document object with test values.""" + return Document(fake_api_call, "companies", "0") + + +class Company(typing.TypedDict): + """Company data type.""" + + id: str + company_name: str + num_employees: int + + +@pytest.fixture(scope="function", name="generate_companies") +def generate_companies_fixture() -> typing.List[Company]: + """Generate a list of companies using fake data.""" + companies: typing.List[Company] = [] + for _ in range(50): + companies.append( + { + "id": str(_), + "company_name": fake.company(), + "num_employees": fake.random_int(1, 1000), + }, + ) + + return companies From 4e766851b724bf3603018f2b1f0fdc747c8057fa Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 28 Aug 2024 14:55:06 +0300 Subject: [PATCH 130/288] feat(documents): add type hints to documents class - Add type hints and generic typing to improve code clarity - Update method signatures and return types for better type safety - Implement overloaded methods for `import_` to handle various scenarios - Enhance error handling and response parsing in several methods --- src/typesense/documents.py | 270 ++++++++++++++++++++++++++++++------- 1 file changed, 222 insertions(+), 48 deletions(-) diff --git 
a/src/typesense/documents.py b/src/typesense/documents.py index 13dacf8..6ed2e3a 100644 --- a/src/typesense/documents.py +++ b/src/typesense/documents.py @@ -1,68 +1,206 @@ +# mypy: disable-error-code="misc" import json +import sys +from typesense.api_call import ApiCall from typesense.exceptions import TypesenseClientError +from typesense.types.document import ( + DeleteQueryParameters, + DeleteResponse, + DirtyValuesParameters, + DocumentExportParameters, + DocumentImportParameters, + DocumentImportParametersReturnDoc, + DocumentImportParametersReturnDocAndId, + DocumentImportParametersReturnId, + DocumentSchema, + DocumentWriteParameters, + ImportResponse, + ImportResponseFail, + ImportResponseSuccess, + ImportResponseWithDoc, + ImportResponseWithDocAndId, + ImportResponseWithId, + SearchParameters, + SearchResponse, + UpdateByFilterParameters, + UpdateByFilterResponse, +) from .document import Document from .logger import logger -from .validation import validate_search from .preprocess import stringify_search_params -from collections.abc import Iterable +from .validation import validate_search + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing -class Documents(object): - RESOURCE_PATH = 'documents' +TDoc = typing.TypeVar("TDoc", bound=DocumentSchema) - def __init__(self, api_call, collection_name): + +class Documents(typing.Generic[TDoc]): + RESOURCE_PATH = "documents" + + def __init__(self, api_call: ApiCall, collection_name: str) -> None: self.api_call = api_call self.collection_name = collection_name - self.documents = {} + self.documents: typing.Dict[str, Document[TDoc]] = {} - def __getitem__(self, document_id): + def __getitem__(self, document_id: str) -> Document[TDoc]: if document_id not in self.documents: - self.documents[document_id] = Document(self.api_call, self.collection_name, document_id) + self.documents[document_id] = Document( + self.api_call, self.collection_name, document_id + ) return 
self.documents[document_id] - def _endpoint_path(self, action=None): + def _endpoint_path(self, action: typing.Union[str, None] = None) -> str: from .collections import Collections - action = action or '' - return u"{0}/{1}/{2}/{3}".format(Collections.RESOURCE_PATH, self.collection_name, Documents.RESOURCE_PATH, - action) + action = action or "" + return "{0}/{1}/{2}/{3}".format( + Collections.RESOURCE_PATH, + self.collection_name, + Documents.RESOURCE_PATH, + action, + ) - def create(self, document, params=None): + def create( + self, document: TDoc, params: typing.Union[DirtyValuesParameters, None] = None + ) -> TDoc: params = params or {} - params['action'] = 'create' - return self.api_call.post(self._endpoint_path(), document, params) + params["action"] = "create" + response = self.api_call.post( + self._endpoint_path(), + body=document, + params=params, + as_json=True, + entity_type=typing.Dict[str, str], + ) + return typing.cast(TDoc, response) - def create_many(self, documents, params=None): - logger.warning('`create_many` is deprecated: please use `import_`.') + def create_many( + self, + documents: typing.List[TDoc], + params: typing.Union[DirtyValuesParameters, None] = None, + ) -> typing.List[typing.Union[ImportResponseSuccess, ImportResponseFail[TDoc]]]: + logger.warning("`create_many` is deprecated: please use `import_`.") return self.import_(documents, params) - def upsert(self, document, params=None): + def upsert( + self, document: TDoc, params: typing.Union[DirtyValuesParameters, None] = None + ) -> TDoc: params = params or {} - params['action'] = 'upsert' - return self.api_call.post(self._endpoint_path(), document, params) + params["action"] = "upsert" + response = self.api_call.post( + self._endpoint_path(), + body=document, + params=params, + as_json=True, + entity_type=typing.Dict[str, str], + ) + return typing.cast(TDoc, response) - def update(self, document, params=None): + def update( + self, + document: TDoc, + params: 
typing.Union[UpdateByFilterParameters, None] = None, + ) -> UpdateByFilterResponse: params = params or {} - params['action'] = 'update' - return self.api_call.patch(self._endpoint_path(), document, params) + params["action"] = "update" + response: UpdateByFilterResponse = self.api_call.patch( + self._endpoint_path(), + body=document, + params=params, + entity_type=UpdateByFilterResponse, + ) + return response - def import_jsonl(self, documents_jsonl): - logger.warning('`import_jsonl` is deprecated: please use `import_`.') + def import_jsonl(self, documents_jsonl: str) -> str: + logger.warning("`import_jsonl` is deprecated: please use `import_`.") return self.import_(documents_jsonl) - # `documents` can be either a list of document objects (or) - # JSONL-formatted string containing multiple documents - def import_(self, documents, params=None, batch_size=None): - if isinstance(documents, Iterable) and not isinstance(documents, (str, bytes)): + @typing.overload + def import_( + self, + documents: typing.List[TDoc], + params: DocumentImportParametersReturnDocAndId, + batch_size: typing.Union[int, None] = None, + ) -> typing.List[ + typing.Union[ImportResponseWithDocAndId[TDoc], ImportResponseFail[TDoc]] + ]: ... + + @typing.overload + def import_( + self, + documents: typing.List[TDoc], + params: DocumentImportParametersReturnId, + batch_size: typing.Union[int, None] = None, + ) -> typing.List[typing.Union[ImportResponseWithId, ImportResponseFail[TDoc]]]: ... + + @typing.overload + def import_( + self, + documents: typing.List[TDoc], + params: typing.Union[DocumentWriteParameters, None] = None, + batch_size: typing.Union[int, None] = None, + ) -> typing.List[typing.Union[ImportResponseSuccess, ImportResponseFail[TDoc]]]: ... 
+ + @typing.overload + def import_( + self, + documents: typing.List[TDoc], + params: DocumentImportParametersReturnDoc, + batch_size: typing.Union[int, None] = None, + ) -> typing.List[ + typing.Union[ImportResponseWithDoc[TDoc], ImportResponseFail[TDoc]] + ]: ... + + @typing.overload + def import_( + self, + documents: typing.List[TDoc], + params: typing.Union[ + DocumentImportParameters, + None, + ], + batch_size: typing.Union[int, None] = None, + ) -> typing.List[ImportResponse[TDoc]]: ... + + @typing.overload + def import_( + self, + documents: typing.Union[bytes, str], + params: typing.Union[ + DocumentImportParameters, + None, + ] = None, + batch_size: typing.Union[int, None] = None, + ) -> str: ... + + # Actual implementation that matches the overloads + def import_( + self, + documents: typing.Union[bytes, str, typing.List[TDoc]], + params: typing.Union[ + DocumentImportParameters, + None, + ] = None, + batch_size: typing.Union[int, None] = None, + ) -> typing.Union[ + ImportResponse[TDoc], + str, + ]: + if not isinstance(documents, (str, bytes)): if batch_size: - response_objs = [] - batch = [] + response_objs: ImportResponse[TDoc] = [] + batch: typing.List[TDoc] = [] for document in documents: batch.append(document) - if (len(batch) == batch_size): - api_response = self.import_(batch, params) + if len(batch) == batch_size: + api_response = self.import_(documents=batch, params=params) response_objs.extend(api_response) batch = [] if batch: @@ -70,38 +208,74 @@ def import_(self, documents, params=None, batch_size=None): response_objs.extend(api_response) else: - document_strs = [] + document_strs: typing.List[str] = [] for document in documents: document_strs.append(json.dumps(document)) if len(document_strs) == 0: - raise TypesenseClientError(f"Cannot import an empty list of documents.") + raise TypesenseClientError( + f"Cannot import an empty list of documents." 
+ ) - docs_import = '\n'.join(document_strs) - api_response = self.api_call.post(self._endpoint_path('import'), docs_import, params, as_json=False) - res_obj_strs = api_response.split('\n') + docs_import = "\n".join(document_strs) + res = self.api_call.post( + self._endpoint_path("import"), + body=docs_import, + params=params, + entity_type=str, + as_json=False, + ) + res_obj_strs = res.split("\n") response_objs = [] for res_obj_str in res_obj_strs: try: - res_obj_json = json.loads(res_obj_str) + res_obj_json: typing.Union[ + ImportResponseWithDocAndId[TDoc], + ImportResponseWithDoc[TDoc], + ImportResponseWithId, + ImportResponseSuccess, + ImportResponseFail[TDoc], + ] = json.loads(res_obj_str) except json.JSONDecodeError as e: - raise TypesenseClientError(f"Invalid response - {res_obj_str}") from e + raise TypesenseClientError( + f"Invalid response - {res_obj_str}" + ) from e response_objs.append(res_obj_json) return response_objs else: - api_response = self.api_call.post(self._endpoint_path('import'), documents, params, as_json=False) + api_response = self.api_call.post( + self._endpoint_path("import"), + body=documents, + params=params, + as_json=False, + entity_type=str, + ) return api_response - def export(self, params=None): - api_response = self.api_call.get(self._endpoint_path('export'), params, as_json=False) + def export( + self, params: typing.Union[DocumentExportParameters, None] = None + ) -> str: + api_response: str = self.api_call.get( + self._endpoint_path("export"), params=params, as_json=False, entity_type=str + ) return api_response - def search(self, search_parameters): + def search(self, search_parameters: SearchParameters) -> SearchResponse[TDoc]: stringified_search_params = stringify_search_params(search_parameters) - validate_search(stringified_search_params) - return self.api_call.get(self._endpoint_path('search'), stringified_search_params) + response: SearchResponse[TDoc] = self.api_call.get( + self._endpoint_path("search"), + 
params=stringified_search_params, + entity_type=SearchResponse, + as_json=True, + ) + return response - def delete(self, params=None): - return self.api_call.delete(self._endpoint_path(), params) + def delete( + self, params: typing.Union[DeleteQueryParameters, None] = None + ) -> DeleteResponse: + response: DeleteResponse = self.api_call.delete( + self._endpoint_path(), params=params, entity_type=DeleteResponse + ) + return response From ff287e38f0ecb408a440c540ef10339355e976f1 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 28 Aug 2024 14:56:11 +0300 Subject: [PATCH 131/288] test(documents): add tests for documents class - Introduce unit tests for various `Documents` class functionalities, including: - Document creation, upsertion, updating, deletion, and importing. - Cover edge cases like: - Missing documents. - Invalid JSON. - Empty imports. - Verify deprecation warnings for legacy methods: - `create_many`. - `import_jsonl`. - Enhance test coverage and ensure reliability of `Documents` class. - Validate error handling and compatibility with different Python versions. 
--- tests/documents_test.py | 470 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 470 insertions(+) create mode 100644 tests/documents_test.py diff --git a/tests/documents_test.py b/tests/documents_test.py new file mode 100644 index 0000000..29b66d7 --- /dev/null +++ b/tests/documents_test.py @@ -0,0 +1,470 @@ +"""Tests for the Documents class.""" + +import json +import logging +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +import pytest +from pytest_mock import MockFixture + +from tests.conftest import Company +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_keys, +) +from typesense.api_call import ApiCall +from typesense.documents import Documents +from typesense.exceptions import InvalidParameter, TypesenseClientError + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Documents object is initialized correctly.""" + documents = Documents(fake_api_call, "companies") + + assert_match_object(documents.api_call, fake_api_call) + assert_object_lists_match(documents.api_call.nodes, fake_api_call.nodes) + assert_match_object( + documents.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + + assert not documents.documents + + +def test_get_missing_document(fake_documents: Documents) -> None: + """Test that the Documents object can get a missing document.""" + document = fake_documents["1"] + + assert_match_object(document.api_call, fake_documents.api_call) + assert_object_lists_match(document.api_call.nodes, fake_documents.api_call.nodes) + assert_match_object( + document.api_call.config.nearest_node, + fake_documents.api_call.config.nearest_node, + ) + assert ( + document._endpoint_path == "/collections/companies/documents/1" # noqa: WPS437 + ) + + +def test_get_existing_document(fake_documents: Documents) -> None: + """Test that the Documents object can get an existing document.""" + 
document = fake_documents["1"] + fetched_document = fake_documents["1"] + + assert len(fake_documents.documents) == 1 + + assert document is fetched_document + + +def test_create( + actual_documents: Documents[Company], + actual_api_call: ApiCall, + delete_all: None, + create_collection: None, + mocker: MockFixture, +) -> None: + """Test that the Documents object can create a document on Typesense server.""" + company: Company = { + "company_name": "Typesense", + "id": "1", + "num_employees": 25, + } + spy = mocker.spy(actual_api_call, "post") + response = actual_documents.create(company) + expected = company + assert response == expected + spy.assert_called_once_with( + "/collections/companies/documents/", + body=company, + params={"action": "create"}, + as_json=True, + entity_type=typing.Dict[str, str], + ) + + +def test_upsert( + actual_documents: Documents[Company], + actual_api_call: ApiCall, + delete_all: None, + create_collection: None, + mocker: MockFixture, +) -> None: + """Test that the Documents object can upsert a document on Typesense server.""" + company: Company = { + "company_name": "company", + "id": "0", + "num_employees": 10, + } + spy = mocker.spy(actual_api_call, "post") + response = actual_documents.upsert(company) + + assert response == company + spy.assert_called_once_with( + "/collections/companies/documents/", + body=company, + params={"action": "upsert"}, + as_json=True, + entity_type=typing.Dict[str, str], + ) + + updated_company: Company = { + "company_name": "company_updated", + "id": "0", + "num_employees": 10, + } + + response_update = actual_documents.upsert( + updated_company, + {"action": "update"}, + ) + + assert response_update == updated_company + assert spy.call_count == 2 + spy.assert_called_with( + "/collections/companies/documents/", + body=updated_company, + params={"action": "upsert"}, + as_json=True, + entity_type=typing.Dict[str, str], + ) + + +def test_update( + actual_documents: Documents[Company], + actual_api_call: 
ApiCall, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the Documents object can update a document on Typesense server.""" + response = actual_documents.update( + {"company_name": "company_updated", "num_employees": 10}, + {"filter_by": "company_name:company"}, + ) + + assert response == {"num_updated": 1} + + +def test_create_many( + actual_documents: Documents[Company], + actual_api_call: ApiCall, + delete_all: None, + create_collection: None, + mocker: MockFixture, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that the Documents object can create many documents on Typesense server.""" + companies: typing.List[Company] = [ + { + "company_name": "Typesense", + "id": "1", + "num_employees": 25, + }, + { + "company_name": "Typesense", + "id": "2", + "num_employees": 25, + }, + ] + with caplog.at_level(logging.WARNING): + response = actual_documents.create_many(companies) + expected = [{"success": True} for _ in companies] + assert response == expected + assert "`create_many` is deprecated: please use `import_`." 
in caplog.text + + +def test_export( + actual_documents: Documents[Company], + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the Documents object can export a document from Typesense server.""" + response = actual_documents.export() + assert response == '{"company_name":"Company","id":"0","num_employees":10}' + + +def test_delete( + actual_documents: Documents[Company], + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the Documents object can delete a document from Typesense server.""" + response = actual_documents.delete({"filter_by": "company_name:Company"}) + assert response == {"num_deleted": 1} + + +def test_delete_ignore_missing( + actual_documents: Documents[Company], + delete_all: None, + create_collection: None, +) -> None: + """Test that the Documents object can ignore a missing document from Typesense server.""" + response = actual_documents.delete( + {"filter_by": "company_name:missing", "ignore_not_found": True}, + ) + assert response == {"num_deleted": 0} + + +def test_import_fail( + generate_companies: typing.List[Company], + actual_documents: Documents[Company], + delete_all: None, + create_collection: None, + mocker: MockFixture, +) -> None: + """Test that the Documents object doesn't throw an error when importing documents.""" + wrong_company: Company = {"company_name": "Wrong", "id": "0", "num_employees": 0} + companies = generate_companies + [wrong_company] + request_spy = mocker.spy(actual_documents, "import_") + response = actual_documents.import_(companies) + + expected: typing.List[typing.Dict[str, typing.Union[str, bool, int]]] = [ + {"success": True} for _ in generate_companies + ] + expected.append( + { + "code": 409, + "document": '{"company_name": "Wrong", "id": "0", "num_employees": 0}', + "error": "A document with id 0 already exists.", + "success": False, + }, + ) + assert request_spy.call_count == 1 + assert response == expected + + 
+def test_import_empty( + actual_documents: Documents[Company], + actual_api_call: ApiCall, + delete_all: None, + create_collection: None, +) -> None: + """Test that the Documents object throws when importing an empty list of documents.""" + with pytest.raises(TypesenseClientError): + actual_documents.import_(documents=[]) + + +def test_import_json_fail( + actual_documents: Documents[Company], + generate_companies: typing.List[Company], + actual_api_call: ApiCall, + delete_all: None, + create_collection: None, + mocker: MockFixture, +) -> None: + """Test that the Documents object throws when importing invalid JSON.""" + mocker.patch( + "json.loads", + side_effect=json.JSONDecodeError("Expecting value", "doc", 0), + ) + + with pytest.raises(TypesenseClientError): + actual_documents.import_(generate_companies) + + +def test_import_batch_size( + generate_companies: typing.List[Company], + actual_documents: Documents[Company], + actual_api_call: ApiCall, + delete_all: None, + create_collection: None, + mocker: MockFixture, +) -> None: + """Test that the Documents object can import documents in batches.""" + batch_size = 5 + document_spy = mocker.spy(actual_documents, "import_") + request_spy = mocker.spy(actual_api_call, "post") + response = actual_documents.import_(generate_companies, batch_size=batch_size) + + expected = [{"success": True} for _ in generate_companies] + assert document_spy.call_count == len(generate_companies) // batch_size + 1 + assert request_spy.call_count == len(generate_companies) // batch_size + assert response == expected + + +def test_import_return_docs( + generate_companies: typing.List[Company], + actual_documents: Documents[Company], + delete_all: None, + create_collection: None, +) -> None: + """Test that the Documents object can return documents when importing.""" + response = actual_documents.import_(generate_companies, {"return_doc": True}) + expected = [ + {"success": True, "document": company} for company in generate_companies + ] + 
assert response == expected + + +def test_import_return_ids( + generate_companies: typing.List[Company], + actual_documents: Documents[Company], + delete_all: None, + create_collection: None, +) -> None: + """Test that the Documents object can return document IDs when importing.""" + response = actual_documents.import_(generate_companies, {"return_id": True}) + expected = [ + {"success": True, "id": company.get("id")} for company in generate_companies + ] + assert response == expected + + +def test_import_return_ids_and_docs( + generate_companies: typing.List[Company], + actual_documents: Documents[Company], + delete_all: None, + create_collection: None, +) -> None: + """Test that the Documents object can return document IDs and documents when importing.""" + response = actual_documents.import_( + generate_companies, + {"return_id": True, "return_doc": True}, + ) + expected = [ + {"success": True, "document": company, "id": company.get("id")} + for company in generate_companies + ] + assert response == expected + + +def test_import_jsonl( + generate_companies: typing.List[Company], + actual_documents: Documents[Company], + delete_all: None, + create_collection: None, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that the Documents object can import documents in JSONL format.""" + companies_in_jsonl_format = "\n".join( + [ + "".join( + [ + '{"id": "', + company["id"], + '", ', + '"company_name": "', + company["company_name"], + '", ', + '"num_employees": ', + str(company["num_employees"]), + "}", + ], + ) + for company in generate_companies + ], + ) + + expected = "\n".join(['{"success":true}' for _ in generate_companies]) + + with caplog.at_level(logging.WARNING): + response = actual_documents.import_jsonl(companies_in_jsonl_format) + assert response == expected + assert "`import_jsonl` is deprecated: please use `import_`." 
in caplog.text + + +def test_search( + actual_documents: Documents[Company], + actual_api_call: ApiCall, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the Documents object can search for documents on Typesense server.""" + response = actual_documents.search( + { + "q": "com", + "query_by": "company_name", + }, + ) + + assert_to_contain_keys( + response, + [ + "facet_counts", + "found", + "hits", + "page", + "out_of", + "request_params", + "search_time_ms", + "search_cutoff", + ], + ) + + assert_to_contain_keys( + response.get("hits")[0], + ["document", "highlights", "highlight", "text_match", "text_match_info"], + ) + + +def test_search_array( + actual_documents: Documents[Company], + actual_api_call: ApiCall, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the SearchParameters can have arrays that are concatenated before request.""" + response = actual_documents.search( + { + "q": "com", + "query_by": ["company_name"], + }, + ) + + assert_to_contain_keys( + response, + [ + "facet_counts", + "found", + "hits", + "page", + "out_of", + "request_params", + "search_time_ms", + "search_cutoff", + ], + ) + + assert_to_contain_keys( + response.get("hits")[0], + ["document", "highlights", "highlight", "text_match", "text_match_info"], + ) + + +def test_search_invalid_parameters( + actual_documents: Documents[Company], + actual_api_call: ApiCall, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the Documents object throws when invalid parameters are passed to search.""" + with pytest.raises(InvalidParameter): + actual_documents.search( + { + "q": "com", + "query_by": "company_name", + "invalid": [ + Company(company_name="", id="", num_employees=0), + ], + }, + ) + + with pytest.raises(InvalidParameter): + actual_documents.search( + { + "q": "com", + "query_by": "company_name", + "invalid": Company(company_name="", id="", 
num_employees=0), + }, + ) From 560ca08e7362e85bd80642e1e5ff66c79039d7e3 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 28 Aug 2024 14:59:33 +0300 Subject: [PATCH 132/288] feat(document): add type hints to document class - Introduce type hints to the `Document` class, including generics for enhanced type safety. - Utilize `typing.TypeVar` to define `TDoc`, ensuring type consistency for documents. --- src/typesense/document.py | 70 ++++++++++++++++++++++++++++++++------- 1 file changed, 58 insertions(+), 12 deletions(-) diff --git a/src/typesense/document.py b/src/typesense/document.py index 246d45b..9cae14f 100644 --- a/src/typesense/document.py +++ b/src/typesense/document.py @@ -1,20 +1,66 @@ -class Document(object): - def __init__(self, api_call, collection_name, document_id): +import sys + +from typesense.api_call import ApiCall +from typesense.configuration import Configuration +from typesense.types.collection import CollectionSchema +from typesense.types.document import ( + DeleteQueryParameters, + DirtyValuesParameters, + DocumentSchema, + DocumentWriteParameters, +) + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +TDoc = typing.TypeVar("TDoc", bound=DocumentSchema) + + +class Document(typing.Generic[TDoc]): + def __init__( + self, api_call: ApiCall, collection_name: str, document_id: str + ) -> None: self.api_call = api_call self.collection_name = collection_name self.document_id = document_id - def _endpoint_path(self): - from .documents import Documents + @property + def _endpoint_path(self) -> str: from .collections import Collections - return u"{0}/{1}/{2}/{3}".format(Collections.RESOURCE_PATH, self.collection_name, Documents.RESOURCE_PATH, - self.document_id) + from .documents import Documents + + return "{0}/{1}/{2}/{3}".format( + Collections.RESOURCE_PATH, + self.collection_name, + Documents.RESOURCE_PATH, + self.document_id, + ) + + def retrieve(self) -> TDoc: + response = 
self.api_call.get( + endpoint=self._endpoint_path, + entity_type=typing.Dict[str, str], + as_json=True, + ) + + return typing.cast(TDoc, response) - def retrieve(self): - return self.api_call.get(self._endpoint_path()) + def update( + self, document: TDoc, params: typing.Union[DirtyValuesParameters, None] = None + ) -> TDoc: + response = self.api_call.patch( + self._endpoint_path, + body=document, + params=params, + entity_type=typing.Dict[str, str], + ) - def update(self, document, params=None): - return self.api_call.patch(self._endpoint_path(), document, params) + return typing.cast(TDoc, response) - def delete(self): - return self.api_call.delete(self._endpoint_path()) + def delete(self) -> TDoc: + response = self.api_call.delete( + self._endpoint_path, entity_type=typing.Dict[str, str] + ) + return typing.cast(TDoc, response) From 2f20365acab6dc621e161c56cb4e91c5dc865c3f Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 28 Aug 2024 15:01:43 +0300 Subject: [PATCH 133/288] test(document): add tests for document class - Introduce unit tests for the `Document` class to verify its initialization, retrieval, update, and deletion methods. - Mock API calls using `requests_mock` to simulate server interactions. - Include actual integration tests for `Document` operations on a live Typesense Server. - Ensure type safety and consistency in the test cases by using annotated return types and assertions. 
--- tests/document_test.py | 132 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 132 insertions(+) create mode 100644 tests/document_test.py diff --git a/tests/document_test.py b/tests/document_test.py new file mode 100644 index 0000000..7c4d210 --- /dev/null +++ b/tests/document_test.py @@ -0,0 +1,132 @@ +"""Tests for the Document class.""" + +from __future__ import annotations + +import requests_mock + +from tests.conftest import Company +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from typesense.api_call import ApiCall +from typesense.document import Document +from typesense.documents import Documents + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Document object is initialized correctly.""" + document = Document(fake_api_call, "companies", "0") + + assert document.document_id == "0" + assert document.collection_name == "companies" + assert_match_object(document.api_call, fake_api_call) + assert_object_lists_match(document.api_call.nodes, fake_api_call.nodes) + assert_match_object( + document.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert ( + document._endpoint_path == "/collections/companies/documents/0" # noqa: WPS437 + ) + + +def test_retrieve(fake_document: Document) -> None: + """Test that the Document object can retrieve an document.""" + json_response: Company = { + "company_name": "Company", + "id": "0", + "num_employees": 10, + } + + with requests_mock.Mocker() as mock: + mock.get( + "http://nearest:8108/collections/companies/documents/0", + json=json_response, + ) + + response = fake_document.retrieve() + + assert len(mock.request_history) == 1 + assert mock.request_history[0].method == "GET" + assert ( + mock.request_history[0].url + == "http://nearest:8108/collections/companies/documents/0" + ) + assert response == 
json_response + + +def test_delete(fake_document: Document) -> None: + """Test that the Document object can delete an document.""" + json_response: Company = { + "company_name": "Company", + "id": "0", + "num_employees": 10, + } + with requests_mock.Mocker() as mock: + mock.delete( + "http://nearest:8108/collections/companies/documents/0", + json=json_response, + ) + + response = fake_document.delete() + + assert len(mock.request_history) == 1 + assert mock.request_history[0].method == "DELETE" + assert ( + mock.request_history[0].url + == "http://nearest:8108/collections/companies/documents/0" + ) + assert response == json_response + + +def test_actual_update( + actual_documents: Documents, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the Document object can update an document on Typesense Server.""" + response = actual_documents["0"].update( + {"company_name": "Company", "num_employees": 20}, + { + "action": "update", + }, + ) + + assert_to_contain_object( + response, + {"id": "0", "company_name": "Company", "num_employees": 20}, + ) + + +def test_actual_retrieve( + actual_documents: Documents, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the Document object can retrieve an document from Typesense Server.""" + response = actual_documents["0"].retrieve() + + assert_to_contain_object( + response, + {"id": "0", "company_name": "Company", "num_employees": 10}, + ) + + +def test_actual_delete( + actual_documents: Documents, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the Document object can delete an document from Typesense Server.""" + response = actual_documents["0"].delete() + + assert response == { + "id": "0", + "company_name": "Company", + "num_employees": 10, + } From 88e4b9c3997dfbbd4ebc956604465d4031022027 Mon Sep 
17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 28 Aug 2024 15:06:44 +0300 Subject: [PATCH 134/288] feat(test-utils): add fixtures for multi search - Introduce `actual_multi_search_fixture` to provide a `MultiSearch` object for testing with a real API. - Add `actual_overrides_fixture` to return an `Overrides` object linked to the "companies" collection. - Enhance test setup by creating reusable fixtures to facilitate testing of multi-search and overrides operations. --- tests/conftest.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index 989aab5..228cee9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -17,6 +17,7 @@ from typesense.documents import Documents from typesense.key import Key from typesense.keys import Keys +from typesense.multi_search import MultiSearch from typesense.operations import Operations from typesense.override import Override from typesense.overrides import Overrides @@ -333,6 +334,12 @@ def actual_documents_fixture(actual_api_call: ApiCall) -> Documents: return Documents(actual_api_call, "companies") +@pytest.fixture(scope="function", name="actual_multi_search") +def actual_multi_search_fixture(actual_api_call: ApiCall) -> MultiSearch: + """Return a MultiSearch object using a real API.""" + return MultiSearch(actual_api_call) + + @pytest.fixture(scope="function", name="actual_overrides") def actual_overrides_fixture(actual_api_call: ApiCall) -> Overrides: return Overrides(actual_api_call, "companies") From f6ac2244f1cfc193ebd09bd73ce6579b0011dc7c Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 28 Aug 2024 15:08:16 +0300 Subject: [PATCH 135/288] feat(multi-search): add types for multi search - Introduce `MultiSearchRequestSchema` to define the schema for multi-search requests. - Add `MultiSearchResponse` to represent the structure of multi-search responses. - Ensure compatibility with Python 3.11 by conditionally importing `typing` or `typing_extensions`. 
- Enhance type safety and clarity in handling multi-search operations. This update provides clear and structured types for multi-search functionality, improving code readability and reducing potential errors in handling multi-search requests and responses. --- src/typesense/types/multi_search.py | 32 +++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 src/typesense/types/multi_search.py diff --git a/src/typesense/types/multi_search.py b/src/typesense/types/multi_search.py new file mode 100644 index 0000000..392f129 --- /dev/null +++ b/src/typesense/types/multi_search.py @@ -0,0 +1,32 @@ +"""Types for multi-search.""" + +import sys + +from typesense.types.document import MultiSearchParameters, SearchResponse + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class MultiSearchResponse(typing.TypedDict): + """ + Response schema for multi-search. + + Attributes: + results (list[SearchResponse]): The search results. + """ + + results: typing.List[SearchResponse[typing.Any]] # noqa: WPS110 + + +class MultiSearchRequestSchema(typing.TypedDict): + """ + Schema for multi-search request. + + Attributes: + searches (list[MultiSearchParameters]): The search parameters. + """ + + searches: typing.List[MultiSearchParameters] From 9d6a25ebb19ae6eb05e4bdee11b29aea325ff4ee Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 28 Aug 2024 15:10:14 +0300 Subject: [PATCH 136/288] feat(multi-search): add type hints to multi search class - Add type annotations to the `MultiSearch` class, improving code clarity. - Introduce `MultiSearchRequestSchema` and `MultiSearchResponse` types for better structure. - Refactor `perform` method to handle request and response more robustly. - Enhance the `perform` method to stringify search parameters before sending the request. - Ensure compatibility with Python versions prior to 3.11 using conditional imports. 
This refactor improves the robustness and readability of the `MultiSearch` class, ensuring better type safety and request handling. --- src/typesense/multi_search.py | 36 +++++++++++++++++++++++++++++++---- 1 file changed, 32 insertions(+), 4 deletions(-) diff --git a/src/typesense/multi_search.py b/src/typesense/multi_search.py index 1e80a50..c48df5f 100644 --- a/src/typesense/multi_search.py +++ b/src/typesense/multi_search.py @@ -1,9 +1,37 @@ +import sys + +from typesense.api_call import ApiCall +from typesense.preprocess import stringify_search_params +from typesense.types.document import MultiSearchCommonParameters +from typesense.types.multi_search import MultiSearchRequestSchema, MultiSearchResponse + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + class MultiSearch(object): - RESOURCE_PATH = '/multi_search' + RESOURCE_PATH = "/multi_search" - def __init__(self, api_call): + def __init__(self, api_call: ApiCall) -> None: self.api_call = api_call - def perform(self, search_queries, common_params): - return self.api_call.post(MultiSearch.RESOURCE_PATH, search_queries, common_params) + def perform( + self, + search_queries: MultiSearchRequestSchema, + common_params: typing.Union[MultiSearchCommonParameters, None] = None, + ) -> MultiSearchResponse: + stringified_search_params = [ + stringify_search_params(search_params) + for search_params in search_queries.get("searches") + ] + search_body = {"searches": stringified_search_params} + response: MultiSearchResponse = self.api_call.post( + MultiSearch.RESOURCE_PATH, + body=search_body, + params=common_params, + as_json=True, + entity_type=MultiSearchResponse, + ) + return response From 2c3e02e9225645b785fb1663b53b9a5e06da07bd Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 28 Aug 2024 15:12:28 +0300 Subject: [PATCH 137/288] test(multi-search): add tests for multi search class - Introduce comprehensive test cases for the `MultiSearch` class. 
- Test single and multiple search queries to ensure accurate results. - Verify handling of array-based `query_by` parameters. - Include tests for error handling with invalid parameters to ensure robustness. - Utilize utility functions like `assert_match_object` and `assert_to_contain_keys` for precise assertions. These tests enhance coverage for the `MultiSearch` class, ensuring its reliability and robustness under various scenarios. --- tests/multi_search_test.py | 174 +++++++++++++++++++++++++++++++++++++ 1 file changed, 174 insertions(+) create mode 100644 tests/multi_search_test.py diff --git a/tests/multi_search_test.py b/tests/multi_search_test.py new file mode 100644 index 0000000..4e6d0f9 --- /dev/null +++ b/tests/multi_search_test.py @@ -0,0 +1,174 @@ +"""Tests for the MultiSearch class.""" + +import pytest + +from tests.conftest import Company +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_keys, +) +from typesense import exceptions +from typesense.api_call import ApiCall +from typesense.multi_search import MultiSearch +from typesense.types.multi_search import MultiSearchRequestSchema + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Document object is initialized correctly.""" + documents = MultiSearch(fake_api_call) + + assert_match_object(documents.api_call, fake_api_call) + assert_object_lists_match(documents.api_call.nodes, fake_api_call.nodes) + assert_match_object( + documents.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + + +def test_multi_search_single_search( + actual_multi_search: MultiSearch, + actual_api_call: ApiCall, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the MultiSearch object can perform a single search.""" + request_params: MultiSearchRequestSchema = { + "searches": [ + {"q": "com", "query_by": "company_name", "collection": "companies"}, + ], + } + response = 
actual_multi_search.perform( + search_queries=request_params, + ) + + assert len(response.get("results")) == 1 + assert_to_contain_keys( + response.get("results")[0], + [ + "facet_counts", + "found", + "hits", + "page", + "out_of", + "request_params", + "search_time_ms", + "search_cutoff", + ], + ) + + assert_to_contain_keys( + response.get("results")[0].get("hits")[0], + ["document", "highlights", "highlight", "text_match", "text_match_info"], + ) + + +def test_multi_search_multiple_searches( + actual_multi_search: MultiSearch, + actual_api_call: ApiCall, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the MultiSearch object can perform multiple searches.""" + request_params: MultiSearchRequestSchema = { + "searches": [ + {"q": "com", "query_by": "company_name", "collection": "companies"}, + {"q": "company", "query_by": "company_name", "collection": "companies"}, + ], + } + + response = actual_multi_search.perform(search_queries=request_params) + + assert len(response.get("results")) == len(request_params.get("searches")) + for search_results in response.get("results"): + assert_to_contain_keys( + search_results, + [ + "facet_counts", + "found", + "hits", + "page", + "out_of", + "request_params", + "search_time_ms", + "search_cutoff", + ], + ) + + assert_to_contain_keys( + search_results.get("hits")[0], + ["document", "highlights", "highlight", "text_match", "text_match_info"], + ) + + +def test_multi_search_array( + actual_multi_search: MultiSearch, + actual_api_call: ApiCall, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the MultiSearch object can perform a search with an array query_by.""" + request_params: MultiSearchRequestSchema = { + "searches": [ + {"q": "com", "query_by": ["company_name"], "collection": "companies"}, + ], + } + response = actual_multi_search.perform(search_queries=request_params) + + assert len(response.get("results")) == 1 + 
assert_to_contain_keys( + response.get("results")[0], + [ + "facet_counts", + "found", + "hits", + "page", + "out_of", + "request_params", + "search_time_ms", + "search_cutoff", + ], + ) + + assert_to_contain_keys( + response.get("results")[0].get("hits")[0], + ["document", "highlights", "highlight", "text_match", "text_match_info"], + ) + + +def test_search_invalid_parameters( + actual_multi_search: MultiSearch, + actual_api_call: ApiCall, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the MultiSearch object raises an error when invalid parameters are passed.""" + with pytest.raises(exceptions.InvalidParameter): + actual_multi_search.perform( + { + "searches": [ + { + "q": "com", + "query_by": "company_name", + "invalid": [Company(company_name="", id="", num_employees=0)], + }, + ], + }, + ) + + with pytest.raises(exceptions.InvalidParameter): + actual_multi_search.perform( + { + "searches": [ + { + "q": "com", + "query_by": "company_name", + "invalid": Company(company_name="", id="", num_employees=0), + }, + ], + }, + ) From c9a9877ad1396bf1a2fcd1d28db5fec94a82ba7a Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 30 Aug 2024 11:36:50 +0300 Subject: [PATCH 138/288] chore: upgrade development environment and tooling - Migrate to Pipfile for improved dependency management: - Ensures consistent package versions across different Python versions - Simplifies dependency installation and management - Enhance code quality and consistency: - Add wemake styling guide for standardized coding conventions - Integrate flake8 for linting and style checking - Incorporate black for automatic code formatting - Improve testing infrastructure: - Add pytest for robust unit testing - Include pytest-mock for better test mocking capabilities - Integrate coverage for measuring test coverage - Expand project dependencies: - Add requests for HTTP operations - Include requests-mock for testing HTTP requests - Incorporate 
python-dotenv for environment variable management - Add types-requests for improved type hinting with requests library - Include typing-extensions for backward compatibility with Python <3.11 - Enhance development tools: - Add faker for generating test data - Maintain compatibility: - Generate requirements.txt from Pipfile for environments not using Pipenv These changes aim to modernize the development environment, improve code quality, enhance testing capabilities, and ensure better compatibility across different Python versions and development setups. chore: migrate to Pipfile and add mypy - Ensure proper package versions are installed for each of the supported python versions through pipfile chore: add wemake styling guide - Add a styling and linting guide to ensure the use of proper techniques and conventions chore: add flake8 chore: add black chore: add pytest chore: add coverage chore: add pytest-mock chore: add requests chore: add requests-mock chore: add python-dotenv chore: add types-requests chore: add typing-extensions for python ver under 3.11 chore: add faker chore: generate requirements from pipfile --- Pipfile | 24 ++ Pipfile.lock | 905 ++++++++++++++++++++++++++++++++++++++++ requirements.txt | 19 +- requirements/common.txt | 6 + requirements/dev.txt | 64 +++ 5 files changed, 1001 insertions(+), 17 deletions(-) create mode 100644 Pipfile create mode 100644 Pipfile.lock create mode 100644 requirements/common.txt create mode 100644 requirements/dev.txt diff --git a/Pipfile b/Pipfile new file mode 100644 index 0000000..4c9511e --- /dev/null +++ b/Pipfile @@ -0,0 +1,24 @@ +[[source]] +url = "https://pypi.org/simple" +verify_ssl = true +name = "pypi" + +[packages] +requests = "*" + +[dev-packages] +mypy = "*" +wemake-python-styleguide = "*" +flake8 = "*" +black = "*" +pytest = "*" +coverage = "*" +pytest-mock = "*" +requests-mock = "*" +python-dotenv = "*" +types-requests = "*" +typing-extensions = {version = 
"*", markers = "python_version < '3.11'"} +faker = "*" + +[requires] +python_version = "3.8" diff --git a/Pipfile.lock b/Pipfile.lock new file mode 100644 index 0000000..73fd24d --- /dev/null +++ b/Pipfile.lock @@ -0,0 +1,905 @@ +{ + "_meta": { + "hash": { + "sha256": "cb14364b2d2eeadbe1841052b4943674e278b784f994853d1a394128c4a5cbac" + }, + "pipfile-spec": 6, + "requires": { + "python_version": "3.8" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": true + } + ] + }, + "default": { + "certifi": { + "hashes": [ + "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", + "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9" + ], + "markers": "python_version >= '3.6'", + "version": "==2024.8.30" + }, + "charset-normalizer": { + "hashes": [ + "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027", + "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087", + "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786", + "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8", + "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09", + "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185", + "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574", + "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e", + "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519", + "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898", + "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269", + "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3", + "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f", + "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6", + 
"sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8", + "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a", + "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73", + "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc", + "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714", + "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2", + "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc", + "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce", + "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d", + "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e", + "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6", + "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269", + "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96", + "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d", + "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a", + "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4", + "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77", + "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d", + "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0", + "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed", + "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068", + "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac", + "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25", + "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8", + "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab", + "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26", 
+ "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2", + "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db", + "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f", + "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5", + "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99", + "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c", + "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d", + "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811", + "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa", + "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a", + "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03", + "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b", + "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04", + "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c", + "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001", + "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458", + "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389", + "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99", + "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985", + "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537", + "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238", + "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f", + "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d", + "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796", + "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a", + 
"sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143", + "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8", + "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c", + "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5", + "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5", + "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711", + "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4", + "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6", + "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c", + "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7", + "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4", + "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b", + "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae", + "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12", + "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c", + "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae", + "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8", + "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887", + "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b", + "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4", + "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f", + "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5", + "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33", + "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519", + "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561" + ], + "markers": "python_full_version >= '3.7.0'", + "version": "==3.3.2" + 
}, + "idna": { + "hashes": [ + "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac", + "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603" + ], + "markers": "python_version >= '3.6'", + "version": "==3.8" + }, + "requests": { + "hashes": [ + "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", + "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==2.32.3" + }, + "urllib3": { + "hashes": [ + "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472", + "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168" + ], + "markers": "python_version >= '3.8'", + "version": "==2.2.2" + } + }, + "develop": { + "astor": { + "hashes": [ + "sha256:070a54e890cefb5b3739d19f30f5a5ec840ffc9c50ffa7d23cc9fc1a38ebbfc5", + "sha256:6a6effda93f4e1ce9f618779b2dd1d9d84f1e32812c23a29b3fff6fd7f63fa5e" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.8.1" + }, + "attrs": { + "hashes": [ + "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346", + "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2" + ], + "markers": "python_version >= '3.7'", + "version": "==24.2.0" + }, + "bandit": { + "hashes": [ + "sha256:52077cb339000f337fb25f7e045995c4ad01511e716e5daac37014b9752de8ec", + "sha256:7c395a436743018f7be0a4cbb0a4ea9b902b6d87264ddecf8cfdc73b4f78ff61" + ], + "markers": "python_version >= '3.8'", + "version": "==1.7.9" + }, + "black": { + "hashes": [ + "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6", + "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e", + "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f", + "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018", + 
"sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e", + "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd", + "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4", + "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed", + "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2", + "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42", + "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af", + "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb", + "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368", + "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb", + "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af", + "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed", + "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47", + "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2", + "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a", + "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c", + "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920", + "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==24.8.0" + }, + "certifi": { + "hashes": [ + "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", + "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9" + ], + "markers": "python_version >= '3.6'", + "version": "==2024.8.30" + }, + "charset-normalizer": { + "hashes": [ + "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027", + "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087", + 
"sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786", + "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8", + "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09", + "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185", + "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574", + "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e", + "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519", + "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898", + "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269", + "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3", + "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f", + "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6", + "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8", + "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a", + "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73", + "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc", + "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714", + "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2", + "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc", + "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce", + "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d", + "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e", + "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6", + "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269", + "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96", + "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d", 
+ "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a", + "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4", + "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77", + "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d", + "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0", + "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed", + "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068", + "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac", + "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25", + "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8", + "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab", + "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26", + "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2", + "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db", + "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f", + "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5", + "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99", + "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c", + "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d", + "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811", + "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa", + "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a", + "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03", + "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b", + "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04", + 
"sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c", + "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001", + "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458", + "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389", + "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99", + "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985", + "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537", + "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238", + "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f", + "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d", + "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796", + "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a", + "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143", + "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8", + "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c", + "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5", + "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5", + "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711", + "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4", + "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6", + "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c", + "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7", + "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4", + "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b", + "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae", + "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12", 
+ "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c", + "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae", + "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8", + "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887", + "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b", + "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4", + "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f", + "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5", + "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33", + "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519", + "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561" + ], + "markers": "python_full_version >= '3.7.0'", + "version": "==3.3.2" + }, + "click": { + "hashes": [ + "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", + "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de" + ], + "markers": "python_version >= '3.7'", + "version": "==8.1.7" + }, + "coverage": { + "hashes": [ + "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca", + "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d", + "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6", + "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989", + "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c", + "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b", + "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223", + "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f", + "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56", + "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3", + 
"sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8", + "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb", + "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388", + "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0", + "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a", + "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8", + "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f", + "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a", + "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962", + "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8", + "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391", + "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc", + "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2", + "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155", + "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb", + "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0", + "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c", + "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a", + "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004", + "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060", + "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232", + "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93", + "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129", + "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163", + "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de", + "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6", 
+ "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23", + "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569", + "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d", + "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778", + "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d", + "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36", + "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a", + "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6", + "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34", + "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704", + "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106", + "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9", + "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862", + "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b", + "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255", + "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16", + "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3", + "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133", + "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb", + "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657", + "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d", + "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca", + "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36", + "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c", + "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e", + 
"sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff", + "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7", + "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5", + "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02", + "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c", + "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df", + "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3", + "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a", + "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959", + "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234", + "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==7.6.1" + }, + "darglint": { + "hashes": [ + "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da", + "sha256:5ae11c259c17b0701618a20c3da343a3eb98b3bc4b5a83d31cdd94f5ebdced8d" + ], + "markers": "python_version >= '3.6' and python_version < '4.0'", + "version": "==1.8.1" + }, + "docutils": { + "hashes": [ + "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6", + "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b" + ], + "markers": "python_version >= '3.7'", + "version": "==0.20.1" + }, + "eradicate": { + "hashes": [ + "sha256:06df115be3b87d0fc1c483db22a2ebb12bcf40585722810d809cc770f5031c37", + "sha256:2b29b3dd27171f209e4ddd8204b70c02f0682ae95eecb353f10e8d72b149c63e" + ], + "version": "==2.3.0" + }, + "exceptiongroup": { + "hashes": [ + "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", + "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc" + ], + "markers": "python_version < '3.11'", + "version": "==1.2.2" + }, + "faker": { + "hashes": [ + 
"sha256:0d3c0399204aaf8205cc1750db443474ca0436f177126b2c27b798e8336cc74f", + "sha256:6a3a08be54c37e05f7943d7ba5211d252c1de737687a46ad6f29209d8d5db11f" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==28.0.0" + }, + "flake8": { + "hashes": [ + "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38", + "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213" + ], + "index": "pypi", + "markers": "python_full_version >= '3.8.1'", + "version": "==7.1.1" + }, + "flake8-bandit": { + "hashes": [ + "sha256:068e09287189cbfd7f986e92605adea2067630b75380c6b5733dab7d87f9a84e", + "sha256:4c8a53eb48f23d4ef1e59293657181a3c989d0077c9952717e98a0eace43e06d" + ], + "markers": "python_version >= '3.6'", + "version": "==4.1.1" + }, + "flake8-broken-line": { + "hashes": [ + "sha256:96c964336024a5030dc536a9f6fb02aa679e2d2a6b35b80a558b5136c35832a9", + "sha256:e2c6a17f8d9a129e99c1320fce89b33843e2963871025c4c2bb7b8b8d8732a85" + ], + "markers": "python_version >= '3.8' and python_version < '4.0'", + "version": "==1.0.0" + }, + "flake8-bugbear": { + "hashes": [ + "sha256:32b2903e22331ae04885dae25756a32a8c666c85142e933f43512a70f342052a", + "sha256:83324bad4d90fee4bf64dd69c61aff94debf8073fbd807c8b6a36eec7a2f0719" + ], + "markers": "python_full_version >= '3.8.1'", + "version": "==23.12.2" + }, + "flake8-commas": { + "hashes": [ + "sha256:940441ab8ee544df564ae3b3f49f20462d75d5c7cac2463e0b27436e2050f263", + "sha256:ebb96c31e01d0ef1d0685a21f3f0e2f8153a0381430e748bf0bbbb5d5b453d54" + ], + "version": "==2.1.0" + }, + "flake8-comprehensions": { + "hashes": [ + "sha256:923c22603e0310376a6b55b03efebdc09753c69f2d977755cba8bb73458a5d4d", + "sha256:b7e027bbb52be2ceb779ee12484cdeef52b0ad3c1fcb8846292bdb86d3034681" + ], + "markers": "python_version >= '3.8'", + "version": "==3.15.0" + }, + "flake8-debugger": { + "hashes": [ + "sha256:0a5e55aeddcc81da631ad9c8c366e7318998f83ff00985a49e6b3ecf61e571bf", + 
"sha256:52b002560941e36d9bf806fca2523dc7fb8560a295d5f1a6e15ac2ded7a73840" + ], + "markers": "python_version >= '3.7'", + "version": "==4.1.2" + }, + "flake8-docstrings": { + "hashes": [ + "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af", + "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75" + ], + "markers": "python_version >= '3.7'", + "version": "==1.7.0" + }, + "flake8-eradicate": { + "hashes": [ + "sha256:18acc922ad7de623f5247c7d5595da068525ec5437dd53b22ec2259b96ce9d22", + "sha256:aee636cb9ecb5594a7cd92d67ad73eb69909e5cc7bd81710cf9d00970f3983a6" + ], + "markers": "python_version >= '3.8' and python_version < '4.0'", + "version": "==1.5.0" + }, + "flake8-isort": { + "hashes": [ + "sha256:0fec4dc3a15aefbdbe4012e51d5531a2eb5fa8b981cdfbc882296a59b54ede12", + "sha256:c1f82f3cf06a80c13e1d09bfae460e9666255d5c780b859f19f8318d420370b3" + ], + "markers": "python_version >= '3.8'", + "version": "==6.1.1" + }, + "flake8-quotes": { + "hashes": [ + "sha256:aad8492fb710a2d3eabe68c5f86a1428de650c8484127e14c43d0504ba30276c" + ], + "version": "==3.4.0" + }, + "flake8-rst-docstrings": { + "hashes": [ + "sha256:d1ce22b4bd37b73cd86b8d980e946ef198cfcc18ed82fedb674ceaa2f8d1afa4", + "sha256:f8c3c6892ff402292651c31983a38da082480ad3ba253743de52989bdc84ca1c" + ], + "markers": "python_version >= '3.7'", + "version": "==0.3.0" + }, + "flake8-string-format": { + "hashes": [ + "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2", + "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af" + ], + "version": "==0.3.0" + }, + "idna": { + "hashes": [ + "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac", + "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603" + ], + "markers": "python_version >= '3.6'", + "version": "==3.8" + }, + "iniconfig": { + "hashes": [ + "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", + 
"sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" + ], + "markers": "python_version >= '3.7'", + "version": "==2.0.0" + }, + "isort": { + "hashes": [ + "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109", + "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6" + ], + "markers": "python_full_version >= '3.8.0'", + "version": "==5.13.2" + }, + "markdown-it-py": { + "hashes": [ + "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", + "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb" + ], + "markers": "python_version >= '3.8'", + "version": "==3.0.0" + }, + "mccabe": { + "hashes": [ + "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", + "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e" + ], + "markers": "python_version >= '3.6'", + "version": "==0.7.0" + }, + "mdurl": { + "hashes": [ + "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", + "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba" + ], + "markers": "python_version >= '3.7'", + "version": "==0.1.2" + }, + "mypy": { + "hashes": [ + "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36", + "sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce", + "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6", + "sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b", + "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca", + "sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24", + "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383", + "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7", + "sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86", + "sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d", + 
"sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4", + "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8", + "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987", + "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385", + "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79", + "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef", + "sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6", + "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70", + "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca", + "sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70", + "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12", + "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104", + "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a", + "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318", + "sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1", + "sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b", + "sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==1.11.2" + }, + "mypy-extensions": { + "hashes": [ + "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", + "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782" + ], + "markers": "python_version >= '3.5'", + "version": "==1.0.0" + }, + "packaging": { + "hashes": [ + "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002", + "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124" + ], + "markers": "python_version >= '3.8'", + "version": "==24.1" + }, + "pathspec": { + "hashes": [ + 
"sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", + "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712" + ], + "markers": "python_version >= '3.8'", + "version": "==0.12.1" + }, + "pbr": { + "hashes": [ + "sha256:788183e382e3d1d7707db08978239965e8b9e4e5ed42669bf4758186734d5f24", + "sha256:a776ae228892d8013649c0aeccbb3d5f99ee15e005a4cbb7e61d55a067b28a2a" + ], + "markers": "python_version >= '2.6'", + "version": "==6.1.0" + }, + "pep8-naming": { + "hashes": [ + "sha256:1705f046dfcd851378aac3be1cd1551c7c1e5ff363bacad707d43007877fa971", + "sha256:1a86b8c71a03337c97181917e2b472f0f5e4ccb06844a0d6f0a33522549e7a80" + ], + "markers": "python_version >= '3.7'", + "version": "==0.13.3" + }, + "platformdirs": { + "hashes": [ + "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee", + "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3" + ], + "markers": "python_version >= '3.8'", + "version": "==4.2.2" + }, + "pluggy": { + "hashes": [ + "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", + "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669" + ], + "markers": "python_version >= '3.8'", + "version": "==1.5.0" + }, + "pycodestyle": { + "hashes": [ + "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3", + "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521" + ], + "markers": "python_version >= '3.8'", + "version": "==2.12.1" + }, + "pydocstyle": { + "hashes": [ + "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019", + "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1" + ], + "markers": "python_version >= '3.6'", + "version": "==6.3.0" + }, + "pyflakes": { + "hashes": [ + "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f", + "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a" + ], + "markers": "python_version >= '3.8'", + 
"version": "==3.2.0" + }, + "pygments": { + "hashes": [ + "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199", + "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a" + ], + "markers": "python_version >= '3.8'", + "version": "==2.18.0" + }, + "pytest": { + "hashes": [ + "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5", + "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==8.3.2" + }, + "pytest-mock": { + "hashes": [ + "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f", + "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==3.14.0" + }, + "python-dateutil": { + "hashes": [ + "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", + "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.9.0.post0" + }, + "python-dotenv": { + "hashes": [ + "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", + "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==1.0.1" + }, + "pyyaml": { + "hashes": [ + "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff", + "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", + "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", + "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e", + "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", + "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", + "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", + 
"sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", + "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", + "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", + "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a", + "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", + "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", + "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8", + "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", + "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19", + "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", + "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a", + "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", + "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", + "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", + "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631", + "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d", + "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", + "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", + "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", + "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", + "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", + "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", + "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706", + "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", + "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", + "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", 
+ "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083", + "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", + "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", + "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", + "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f", + "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725", + "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", + "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", + "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", + "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", + "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", + "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5", + "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d", + "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290", + "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", + "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", + "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", + "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", + "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12", + "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4" + ], + "markers": "python_version >= '3.8'", + "version": "==6.0.2" + }, + "requests": { + "hashes": [ + "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", + "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==2.32.3" + }, + "requests-mock": { + "hashes": [ + "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563", 
+ "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401" + ], + "index": "pypi", + "markers": "python_version >= '3.5'", + "version": "==1.12.1" + }, + "restructuredtext-lint": { + "hashes": [ + "sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45" + ], + "version": "==1.4.0" + }, + "rich": { + "hashes": [ + "sha256:2e85306a063b9492dffc86278197a60cbece75bcb766022f3436f567cae11bdc", + "sha256:a5ac1f1cd448ade0d59cc3356f7db7a7ccda2c8cbae9c7a90c28ff463d3e91f4" + ], + "markers": "python_full_version >= '3.7.0'", + "version": "==13.8.0" + }, + "setuptools": { + "hashes": [ + "sha256:0274581a0037b638b9fc1c6883cc71c0210865aaa76073f7882376b641b84e8f", + "sha256:a85e96b8be2b906f3e3e789adec6a9323abf79758ecfa3065bd740d81158b11e" + ], + "markers": "python_version >= '3.8'", + "version": "==74.0.0" + }, + "six": { + "hashes": [ + "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", + "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.16.0" + }, + "snowballstemmer": { + "hashes": [ + "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1", + "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a" + ], + "version": "==2.2.0" + }, + "stevedore": { + "hashes": [ + "sha256:1efd34ca08f474dad08d9b19e934a22c68bb6fe416926479ba29e5013bcc8f78", + "sha256:9a64265f4060312828151c204efbe9b7a9852a0d9228756344dbc7e4023e375a" + ], + "markers": "python_version >= '3.8'", + "version": "==5.3.0" + }, + "tomli": { + "hashes": [ + "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", + "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" + ], + "markers": "python_version < '3.11'", + "version": "==2.0.1" + }, + "types-requests": { + "hashes": [ + "sha256:90c079ff05e549f6bf50e02e910210b98b8ff1ebdd18e19c873cd237737c1358", + 
"sha256:f754283e152c752e46e70942fa2a146b5bc70393522257bb85bd1ef7e019dcc3" + ], + "index": "pypi", + "markers": "python_version >= '3.8'", + "version": "==2.32.0.20240712" + }, + "typing-extensions": { + "hashes": [ + "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", + "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8" + ], + "markers": "python_version < '3.11'", + "version": "==4.12.2" + }, + "urllib3": { + "hashes": [ + "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472", + "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168" + ], + "markers": "python_version >= '3.8'", + "version": "==2.2.2" + }, + "wemake-python-styleguide": { + "hashes": [ + "sha256:2219be145185edcd5e01f4ce49e3dea11acc34f2c377face0c175bb6ea6ac988", + "sha256:69139858cf5b2a9ba09dac136e2873a4685515768f68fdef2684ebefd7b1dafd" + ], + "index": "pypi", + "markers": "python_version < '4.0' and python_full_version >= '3.8.1'", + "version": "==0.18.0" + } + } +} diff --git a/requirements.txt b/requirements.txt index d06fed1..9bdf039 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,17 +1,2 @@ -certifi==2024.7.4 -chardet==3.0.4 -charset-normalizer==3.3.2 -idna==2.8 -Faker==27.4.0 -iniconfig==2.0.0 -isort==5.13.2 -mypy==1.11.0 -mypy-extensions==1.0.0 -packaging==24.1 -pluggy==1.5.0 -pytest==8.3.2 -python-dateutil==2.9.0.post0 -requests==2.32.3 -six==1.16.0 -typing_extensions==4.12.2 -urllib3==2.2.2 +-i https://pypi.org/simple +-r requirements/common.txt diff --git a/requirements/common.txt b/requirements/common.txt new file mode 100644 index 0000000..7b89932 --- /dev/null +++ b/requirements/common.txt @@ -0,0 +1,6 @@ +-i https://pypi.org/simple +certifi==2024.8.30; python_version >= '3.6' +charset-normalizer==3.3.2; python_full_version >= '3.7.0' +idna==3.8; python_version >= '3.6' +requests==2.32.3; python_version >= 
'3.8' +urllib3==2.2.2; python_version >= '3.8' diff --git a/requirements/dev.txt b/requirements/dev.txt new file mode 100644 index 0000000..1b4f41e --- /dev/null +++ b/requirements/dev.txt @@ -0,0 +1,64 @@ +-i https://pypi.org/simple +-r common.txt +astor==0.8.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' +attrs==24.2.0; python_version >= '3.7' +bandit==1.7.9; python_version >= '3.8' +black==24.8.0; python_version >= '3.8' +certifi==2024.8.30; python_version >= '3.6' +charset-normalizer==3.3.2; python_full_version >= '3.7.0' +click==8.1.7; python_version >= '3.7' +coverage==7.6.1; python_version >= '3.8' +darglint==1.8.1; python_version >= '3.6' and python_version < '4.0' +docutils==0.20.1; python_version >= '3.7' +eradicate==2.3.0 +exceptiongroup==1.2.2; python_version < '3.11' +faker==28.0.0; python_version >= '3.8' +flake8==7.1.1; python_full_version >= '3.8.1' +flake8-bandit==4.1.1; python_version >= '3.6' +flake8-broken-line==1.0.0; python_version >= '3.8' and python_version < '4.0' +flake8-bugbear==23.12.2; python_full_version >= '3.8.1' +flake8-commas==2.1.0 +flake8-comprehensions==3.15.0; python_version >= '3.8' +flake8-debugger==4.1.2; python_version >= '3.7' +flake8-docstrings==1.7.0; python_version >= '3.7' +flake8-eradicate==1.5.0; python_version >= '3.8' and python_version < '4.0' +flake8-isort==6.1.1; python_version >= '3.8' +flake8-quotes==3.4.0 +flake8-rst-docstrings==0.3.0; python_version >= '3.7' +flake8-string-format==0.3.0 +idna==3.8; python_version >= '3.6' +iniconfig==2.0.0; python_version >= '3.7' +isort==5.13.2; python_full_version >= '3.8.0' +markdown-it-py==3.0.0; python_version >= '3.8' +mccabe==0.7.0; python_version >= '3.6' +mdurl==0.1.2; python_version >= '3.7' +mypy==1.11.2; python_version >= '3.8' +mypy-extensions==1.0.0; python_version >= '3.5' +packaging==24.1; python_version >= '3.8' +pathspec==0.12.1; python_version >= '3.8' +pbr==6.1.0; python_version >= 
'2.6' +pep8-naming==0.13.3; python_version >= '3.7' +platformdirs==4.2.2; python_version >= '3.8' +pluggy==1.5.0; python_version >= '3.8' +pycodestyle==2.12.1; python_version >= '3.8' +pydocstyle==6.3.0; python_version >= '3.6' +pyflakes==3.2.0; python_version >= '3.8' +pygments==2.18.0; python_version >= '3.8' +pytest==8.3.2; python_version >= '3.8' +pytest-mock==3.14.0; python_version >= '3.8' +python-dateutil==2.9.0.post0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' +python-dotenv==1.0.1; python_version >= '3.8' +pyyaml==6.0.2; python_version >= '3.8' +requests==2.32.3; python_version >= '3.8' +requests-mock==1.12.1; python_version >= '3.5' +restructuredtext-lint==1.4.0 +rich==13.8.0; python_full_version >= '3.7.0' +setuptools==74.0.0; python_version >= '3.8' +six==1.16.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' +snowballstemmer==2.2.0 +stevedore==5.3.0; python_version >= '3.8' +tomli==2.0.1; python_version < '3.11' +types-requests==2.32.0.20240712; python_version >= '3.8' +typing-extensions==4.12.2; python_version < '3.11' +urllib3==2.2.2; python_version >= '3.8' +wemake-python-styleguide==0.18.0; python_version < '4.0' and python_full_version >= '3.8.1' From c457138596cc8b7e89ff4b050d65b237ea6248f1 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 30 Aug 2024 13:09:44 +0300 Subject: [PATCH 139/288] feat(convo-model): add types for convo models - Introduce `ConversationModelCreateSchema` for model creation - Add `ConversationModelDeleteSchema` for model deletion - Create `ConversationModelSchema` for general model representation - Enhance type checking for conversation-related features - Improve documentation with detailed attribute descriptions - Support different LLM providers: OpenAI, Cloudflare, and vLLM - Use `typing.TypedDict` for strong typing of model attributes - Add `typing_extensions` support for Python versions < 3.11 --- src/typesense/types/conversations_model.py | 83 
++++++++++++++++++++++ 1 file changed, 83 insertions(+) create mode 100644 src/typesense/types/conversations_model.py diff --git a/src/typesense/types/conversations_model.py b/src/typesense/types/conversations_model.py new file mode 100644 index 0000000..c5d2c17 --- /dev/null +++ b/src/typesense/types/conversations_model.py @@ -0,0 +1,83 @@ +"""ConversationalModel types for Typesense Python Client.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class ConversationModelCreateSchema(typing.TypedDict): + """ + Schema for creating a new conversation model. + + Attributes: + model_name (str): Name of the LLM model offered by OpenAI, Cloudflare or vLLM. + + api_key (str): The LLM service's API Key. + + system_prompt (str): The system prompt that contains special instructions to the LLM. + + max_bytes (int): The maximum number of bytes to send to the LLM in every API call. + Consult the LLM's documentation on the number of bytes supported in the context window. + + history_collection (str): Typesense collection that stores the historical conversations. + + account_id (str): LLM service's account ID (only applicable for Cloudflare). + + ttl (int): Time interval in seconds after which the messages would be deleted. Default: 86400 (24 hours). + + vllm_url (str): The URL of the vLLM service. + + id (str): The custom ID of the model. + """ + + id: typing.NotRequired[str] + model_name: str + api_key: str + system_prompt: typing.NotRequired[str] + max_bytes: int + history_collection: str + account_id: typing.NotRequired[str] + ttl: typing.NotRequired[int] + vllm_url: typing.NotRequired[str] + + +class ConversationModelDeleteSchema(typing.TypedDict): + """ + Schema for deleting a conversation model. + + Attributes: + id (str): The ID of the model. + """ + + id: str + + +class ConversationModelSchema( + ConversationModelCreateSchema, +): + """ + Schema for a conversation model. 
+ + Attributes: + model_name (str): Name of the LLM model offered by OpenAI, Cloudflare or vLLM. + + api_key (str): The LLM service's API Key. + + system_prompt (str): The system prompt that contains special instructions to the LLM. + + max_bytes (int): The maximum number of bytes to send to the LLM in every API call. + Consult the LLM's documentation on the number of bytes supported in the context window. + + history_collection (str): Typesense collection that stores the historical conversations. + + account_id (str): LLM service's account ID (only applicable for Cloudflare). + + ttl (int): Time interval in seconds after which the messages would be deleted. Default: 86400 (24 hours). + + vllm_url (str): The URL of the vLLM service. + + id (str): The custom ID of the model. + """ From 7c0f53f942f63b37a008d502ee1e783e6852b9d7 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 30 Aug 2024 13:07:52 +0300 Subject: [PATCH 140/288] feat(test-utils): add fixtures for conversation models - Add create_conversation_history_collection fixture - Implementcreate_conversations_model fixture - Include delete_all_conversations_models cleanup fixture - Add actual and fake fixtures for ConversationsModels - Update imports and dependencies in `conftest.py` --- tests/conftest.py | 96 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 96 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index 228cee9..8eb0531 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -13,6 +13,8 @@ from typesense.collection import Collection from typesense.collections import Collections from typesense.configuration import Configuration +from typesense.conversation_model import ConversationModel +from typesense.conversations_models import ConversationsModels from typesense.document import Document from typesense.documents import Documents from typesense.key import Key @@ -87,6 +89,58 @@ def create_document_fixture() -> None: response.raise_for_status() 
+@pytest.fixture(scope="function", name="create_conversation_history_collection") +def create_conversation_history_collection_fixture() -> None: + """Create a collection for conversation history in the Typesense server.""" + url = "http://localhost:8108/collections" + delete_url = "http://localhost:8108/collections/conversation_store" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + collection_data = { + "name": "conversation_store", + "fields": [ + {"name": "conversation_id", "type": "string"}, + {"name": "model_id", "type": "string"}, + {"name": "timestamp", "type": "int32"}, + {"name": "role", "type": "string", "index": False}, + {"name": "message", "type": "string", "index": False}, + ], + } + + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + if delete_response.status_code not in {200, 404}: + delete_response.raise_for_status() + response = requests.post(url, headers=headers, json=collection_data, timeout=3) + response.raise_for_status() + + +@pytest.fixture(scope="function", name="create_conversations_model") +def create_conversations_model_fixture( + create_conversation_history_collection: None, +) -> str: + """Create a conversations model in the Typesense server.""" + url = "http://localhost:8108/conversations/models" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + conversations_model_data = { + "api_key": os.environ["OPEN_AI_KEY"], + "max_bytes": 16384, + "model_name": "openai/gpt-3.5-turbo", + "history_collection": "conversation_store", + "system_prompt": "This is a system prompt", + } + + response = requests.post( + url, + headers=headers, + json=conversations_model_data, + timeout=3, + ) + + response.raise_for_status() + + conversation_model_id: str = response.json()["id"] + return conversation_model_id + + @pytest.fixture(scope="function", name="create_stopword") def create_stopword_fixture() -> None: 
"""Create a stopword set in the Typesense server.""" @@ -134,6 +188,26 @@ def clear_typesense_aliases() -> None: alias_name = alias.get("name") delete_url = f"{url}/{alias_name}" delete_response = requests.delete(delete_url, headers=headers) +@pytest.fixture(scope="function", name="delete_all_conversations_models") +def clear_typesense_conversations_models() -> None: + """Remove all conversations_models from the Typesense server.""" + url = "http://localhost:8108/conversations/models" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + # Get the list of collections + response = requests.get(url, headers=headers, timeout=3) + response.raise_for_status() + + conversations_models = response.json() + + # Delete each alias + for conversation_model in conversations_models: + conversation_model_id = conversation_model.get("id") + delete_url = f"{url}/{conversation_model_id}" + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + delete_response.raise_for_status() + + @pytest.fixture(scope="function", name="delete_all_stopwords") def clear_typesense_stopwords() -> None: """Remove all stopwords from the Typesense server.""" @@ -345,6 +419,14 @@ def actual_overrides_fixture(actual_api_call: ApiCall) -> Overrides: return Overrides(actual_api_call, "companies") +@pytest.fixture(scope="function", name="actual_conversations_models") +def actual_conversations_models_fixture( + actual_api_call: ApiCall, +) -> ConversationsModels: + """Return a ConversationsModels object using a real API.""" + return ConversationsModels(actual_api_call) + + @pytest.fixture(scope="function", name="actual_synonyms") def actual_synonyms_fixture(actual_api_call: ApiCall) -> Synonyms: return Synonyms(actual_api_call, "companies") @@ -442,6 +524,18 @@ def fake_overrides_fixture(fake_api_call: ApiCall) -> Overrides: return Overrides(fake_api_call, "companies") +@pytest.fixture(scope="function", name="fake_conversations_models") +def 
fake_conversations_models_fixture(fake_api_call: ApiCall) -> ConversationsModels: + """Return a Collection object with test values.""" + return ConversationsModels(fake_api_call) + + +@pytest.fixture(scope="function", name="fake_conversation_model") +def fake_conversation_model_fixture(fake_api_call: ApiCall) -> ConversationModel: + """Return a ConversationModel object with test values.""" + return ConversationModel(fake_api_call, "conversation_model_id") + + @pytest.fixture(scope="function", name="fake_override") def fake_override_fixture(fake_api_call: ApiCall) -> Override: """Return a Collection object with test values.""" @@ -470,6 +564,8 @@ def fake_aliases_fixture(fake_api_call: ApiCall) -> Aliases: def fake_alias_fixture(fake_api_call: ApiCall) -> Alias: """Return a Collection object with test values.""" return Alias(fake_api_call, "company_alias") + + @pytest.fixture(scope="function", name="fake_stopwords") def fake_stopwords_fixture(fake_api_call: ApiCall) -> Stopwords: """Return a Stopwords object with test values.""" From 7a4c4511b9fccc7bd51027a19f5f75c5604a1956 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 30 Aug 2024 13:11:35 +0300 Subject: [PATCH 141/288] feat(convo-models): add type hints to convo models Enhance ConversationsModels with type annotations - Add type hints to `ConversationsModels` class methods - Improve `create()` and `retrieve()` methods with specific types - Use `ConversationModelCreateSchema` and `ConversationModelSchema` - Implement `typing.Dict` for `conversations_models` attribute - Update `__getitem__` method with proper return type - Ensure compatibility with Python versions before 3.11 --- src/typesense/conversations_models.py | 47 +++++++++++++++++++++------ 1 file changed, 37 insertions(+), 10 deletions(-) diff --git a/src/typesense/conversations_models.py b/src/typesense/conversations_models.py index 58fd3a7..785b8ac 100644 --- a/src/typesense/conversations_models.py +++ 
b/src/typesense/conversations_models.py @@ -1,21 +1,48 @@ +import sys + +from typesense.api_call import ApiCall +from typesense.types.conversations_model import ( + ConversationModelCreateSchema, + ConversationModelSchema, +) + +if sys.version_info > (3, 11): + import typing +else: + import typing_extensions as typing + from .conversation_model import ConversationModel class ConversationsModels(object): - RESOURCE_PATH = '/conversations/models' + RESOURCE_PATH = "/conversations/models" - def __init__(self, api_call): + def __init__(self, api_call: ApiCall) -> None: self.api_call = api_call - self.conversations_models = {} + self.conversations_models: typing.Dict[str, ConversationModel] = {} - def __getitem__(self, model_id): + def __getitem__(self, model_id: str) -> ConversationModel: if model_id not in self.conversations_models: - self.conversations_models[model_id] = ConversationModel(self.api_call, model_id) + self.conversations_models[model_id] = ConversationModel( + self.api_call, + model_id, + ) - return self.conversations_models.get(model_id) + return self.conversations_models[model_id] - def create(self, model): - return self.api_call.post(ConversationsModels.RESOURCE_PATH, model) + def create(self, model: ConversationModelCreateSchema) -> ConversationModelSchema: + response = self.api_call.post( + endpoint=ConversationsModels.RESOURCE_PATH, + entity_type=ConversationModelSchema, + as_json=True, + body=model, + ) + return response - def retrieve(self): - return self.api_call.get(ConversationsModels.RESOURCE_PATH) + def retrieve(self) -> typing.List[ConversationModelSchema]: + response: typing.List[ConversationModelSchema] = self.api_call.get( + endpoint=ConversationsModels.RESOURCE_PATH, + entity_type=typing.List[ConversationModelSchema], + as_json=True, + ) + return response From c402af23c9d606d9db34d20f6a4569e19c8bff70 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 30 Aug 2024 13:13:44 +0300 Subject: [PATCH 142/288] test(convo-models): add 
tests for convo models class - Implement unit tests for initialization, retrieval, and creation - Test both fake and actual API interactions - Verify proper handling of existing and missing models - Ensure compatibility with Python versions before 3.11 - Use `requests_mock` for simulating HTTP responses - Add assertions for object structure and API call correctness --- pytest.ini | 2 + tests/conversations_models_test.py | 176 +++++++++++++++++++++++++++++ 2 files changed, 178 insertions(+) create mode 100644 tests/conversations_models_test.py diff --git a/pytest.ini b/pytest.ini index fcccae1..fd1accd 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,2 +1,4 @@ [pytest] pythonpath = src +markers = + open_ai diff --git a/tests/conversations_models_test.py b/tests/conversations_models_test.py new file mode 100644 index 0000000..8b6979f --- /dev/null +++ b/tests/conversations_models_test.py @@ -0,0 +1,176 @@ +"""Tests for the ConversationsModels class.""" + +from __future__ import annotations + +import os +import sys + +import pytest +import requests_mock + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_keys, + assert_to_contain_object, +) +from typesense.api_call import ApiCall +from typesense.conversations_models import ConversationsModels +from typesense.types.conversations_model import ConversationModelSchema + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the ConversationsModels object is initialized correctly.""" + conversations_models = ConversationsModels(fake_api_call) + + assert_match_object(conversations_models.api_call, fake_api_call) + assert_object_lists_match(conversations_models.api_call.nodes, fake_api_call.nodes) + assert_match_object( + conversations_models.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + + assert not 
conversations_models.conversations_models + + +def test_get_missing_conversations_model( + fake_conversations_models: ConversationsModels, +) -> None: + """Test that the ConversationsModels object can get a missing conversations_model.""" + conversations_model = fake_conversations_models["conversation_model_id"] + + assert_match_object( + conversations_model.api_call, + fake_conversations_models.api_call, + ) + assert_object_lists_match( + conversations_model.api_call.nodes, + fake_conversations_models.api_call.nodes, + ) + assert_match_object( + conversations_model.api_call.config.nearest_node, + fake_conversations_models.api_call.config.nearest_node, + ) + assert ( + conversations_model._endpoint_path # noqa: WPS437 + == "/conversations/models/conversation_model_id" + ) + + +def test_get_existing_conversations_model( + fake_conversations_models: ConversationsModels, +) -> None: + """Test that the ConversationsModels object can get an existing conversations_model.""" + conversations_model = fake_conversations_models["conversations_model_id"] + fetched_conversations_model = fake_conversations_models["conversations_model_id"] + + assert len(fake_conversations_models.conversations_models) == 1 + + assert conversations_model is fetched_conversations_model + + +def test_retrieve(fake_conversations_models: ConversationsModels) -> None: + """Test that the ConversationsModels object can retrieve conversations_models.""" + json_response: typing.List[ConversationModelSchema] = [ + { + "api_key": "abc", + "id": "1", + "max_bytes": 1000000, + "model_name": "openAI-gpt-3", + "system_prompt": "This is a system prompt", + }, + ] + + with requests_mock.Mocker() as mock: + mock.get( + "http://nearest:8108/conversations/models", + json=json_response, + ) + + response = fake_conversations_models.retrieve() + + assert len(response) == 1 + assert response[0] == json_response[0] + assert response == json_response + + +def 
test_create(fake_conversations_models: ConversationsModels) -> None: + """Test that the ConversationsModels object can create a conversations_model.""" + json_response: ConversationModelSchema = { + "api_key": "abc", + "id": "1", + "max_bytes": 1000000, + "model_name": "openAI-gpt-3", + "system_prompt": "This is a system prompt", + } + + with requests_mock.Mocker() as mock: + mock.post( + "http://nearest:8108/conversations/models", + json=json_response, + ) + + fake_conversations_models.create( + model={ + "api_key": "abc", + "id": "1", + "max_bytes": 1000000, + "model_name": "openAI-gpt-3", + "system_prompt": "This is a system prompt", + }, + ) + + assert mock.call_count == 1 + assert mock.called is True + assert mock.last_request.method == "POST" + assert mock.last_request.url == "http://nearest:8108/conversations/models" + assert mock.last_request.json() == json_response + + +@pytest.mark.open_ai +def test_actual_create( + actual_conversations_models: ConversationsModels, + create_conversation_history_collection: None, +) -> None: + """Test that it can create an conversations_model on Typesense Server.""" + response = actual_conversations_models.create( + { + "api_key": os.environ["OPEN_AI_KEY"], + "history_collection": "conversation_store", + "max_bytes": 16384, + "model_name": "openai/gpt-3.5-turbo", + "system_prompt": "This is meant for testing purposes", + }, + ) + + assert_to_contain_keys( + response, + ["id", "api_key", "max_bytes", "model_name", "system_prompt"], + ) + + +@pytest.mark.open_ai +def test_actual_retrieve( + actual_conversations_models: ConversationsModels, + delete_all: None, + delete_all_conversations_models: None, + create_conversations_model: str, +) -> None: + """Test that it can retrieve an conversations_model from Typesense Server.""" + response = actual_conversations_models.retrieve() + assert len(response) == 1 + assert_to_contain_object( + 
response[0], + { + "id": create_conversations_model, + }, + ) + assert_to_contain_keys( + response[0], + ["id", "api_key", "max_bytes", "model_name", "system_prompt"], + ) From 7337c3355b4dd8283880f4ea899ea86fc56e4ed2 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 30 Aug 2024 13:15:45 +0300 Subject: [PATCH 143/288] feat(convo-model): add type hints to convo model class Refactor `ConversationModel` methods and add type annotations - Add type annotations for `__init__`, `_endpoint_path`, `retrieve`, `update`, and `delete` methods - Convert `_endpoint_path` method to a `@property` - Refactor `retrieve`, `update`, and `delete` methods to use typed responses - Ensure correct request handling by updating API call methods to return strongly typed schemas (`ConversationModelSchema`, `ConversationModelCreateSchema`, and `ConversationModelDeleteSchema`) --- src/typesense/conversation_model.py | 42 ++++++++++++++++++++++------- 1 file changed, 33 insertions(+), 9 deletions(-) diff --git a/src/typesense/conversation_model.py b/src/typesense/conversation_model.py index da84aba..7da67cc 100644 --- a/src/typesense/conversation_model.py +++ b/src/typesense/conversation_model.py @@ -1,17 +1,41 @@ +from typesense.api_call import ApiCall +from typesense.types.conversations_model import ( + ConversationModelCreateSchema, + ConversationModelDeleteSchema, + ConversationModelSchema, +) + + class ConversationModel(object): - def __init__(self, api_call, model_id): + def __init__(self, api_call: ApiCall, model_id: str) -> None: self.model_id = model_id self.api_call = api_call - def _endpoint_path(self): + @property + def _endpoint_path(self) -> str: from .conversations_models import ConversationsModels - return u"{0}/{1}".format(ConversationsModels.RESOURCE_PATH, self.model_id) - def retrieve(self): - return self.api_call.get(self._endpoint_path()) + return "{0}/{1}".format(ConversationsModels.RESOURCE_PATH, self.model_id) + + def retrieve(self) -> ConversationModelSchema: + 
response = self.api_call.get( + self._endpoint_path, + as_json=True, + entity_type=ConversationModelSchema, + ) + return response - def update(self, model): - return self.api_call.put(self._endpoint_path(), model) + def update(self, model: ConversationModelCreateSchema) -> ConversationModelSchema: + response: ConversationModelSchema = self.api_call.put( + self._endpoint_path, + body=model, + entity_type=ConversationModelSchema, + ) + return response - def delete(self): - return self.api_call.delete(self._endpoint_path()) + def delete(self) -> ConversationModelDeleteSchema: + response: ConversationModelDeleteSchema = self.api_call.delete( + self._endpoint_path, + entity_type=ConversationModelDeleteSchema, + ) + return response From b6a2f011f1532c1359be6a92e384cba8d056945c Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 30 Aug 2024 13:16:44 +0300 Subject: [PATCH 144/288] test(convo-model): add tests for convo model class Add unit tests for `ConversationModel` class - Introduce tests for the `ConversationModel` class - Cover `init`, `retrieve`, and `delete` methods - Validate correct API interactions using `requests_mock` - Ensure accurate handling of conversation models on the Typesense server - Utilize `assert_match_object`, `assert_object_lists_match`, and `assert_to_contain_keys` for validation --- tests/conversation_model_test.py | 162 +++++++++++++++++++++++++++++++ 1 file changed, 162 insertions(+) create mode 100644 tests/conversation_model_test.py diff --git a/tests/conversation_model_test.py b/tests/conversation_model_test.py new file mode 100644 index 0000000..4f46c7e --- /dev/null +++ b/tests/conversation_model_test.py @@ -0,0 +1,162 @@ +"""Tests for the ConversationModel class.""" + +from __future__ import annotations + +import pytest +import requests_mock +from dotenv import load_dotenv + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_keys, +) +from typesense.api_call 
import ApiCall +from typesense.conversation_model import ConversationModel +from typesense.conversations_models import ConversationsModels +from typesense.types.conversations_model import ( + ConversationModelDeleteSchema, + ConversationModelSchema, +) + +load_dotenv() + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the ConversationModel object is initialized correctly.""" + conversation_model = ConversationModel( + fake_api_call, + "conversation_model_id", + ) + + assert conversation_model.model_id == "conversation_model_id" + assert_match_object(conversation_model.api_call, fake_api_call) + assert_object_lists_match(conversation_model.api_call.nodes, fake_api_call.nodes) + assert_match_object( + conversation_model.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert ( + conversation_model._endpoint_path # noqa: WPS437 + == "/conversations/models/conversation_model_id" + ) + + +def test_retrieve(fake_conversation_model: ConversationModel) -> None: + """Test that the ConversationModel object can retrieve a conversation_model.""" + json_response: ConversationModelSchema = { + "api_key": "abc", + "id": "conversation_model_id", + "max_bytes": 1000000, + "model_name": "conversation_model_name", + "system_prompt": "This is a system prompt", + } + + with requests_mock.Mocker() as mock: + mock.get( + "/conversations/models/conversation_model_id", + json=json_response, + ) + + response = fake_conversation_model.retrieve() + + assert len(mock.request_history) == 1 + assert mock.request_history[0].method == "GET" + assert ( + mock.request_history[0].url + == "http://nearest:8108/conversations/models/conversation_model_id" + ) + assert response == json_response + + +def test_delete(fake_conversation_model: ConversationModel) -> None: + """Test that the ConversationModel object can delete a conversation_model.""" + json_response: ConversationModelDeleteSchema = { + "id": 
"conversation_model_id", + } + with requests_mock.Mocker() as mock: + mock.delete( + "/conversations/models/conversation_model_id", + json=json_response, + ) + + response = fake_conversation_model.delete() + + assert len(mock.request_history) == 1 + assert mock.request_history[0].method == "DELETE" + assert ( + mock.request_history[0].url + == "http://nearest:8108/conversations/models/conversation_model_id" + ) + assert response == json_response + + +@pytest.mark.open_ai +def test_actual_retrieve( + actual_conversations_models: ConversationsModels, + delete_all_conversations_models: None, + create_conversations_model: str, +) -> None: + """Test it can retrieve a conversation_model from Typesense Server.""" + response = actual_conversations_models[create_conversations_model].retrieve() + + assert_to_contain_keys( + response, + ["id", "model_name", "system_prompt", "max_bytes", "api_key"], + ) + assert response.get("id") == create_conversations_model + + +@pytest.mark.open_ai +def test_actual_update( + actual_conversations_models: ConversationsModels, + delete_all_conversations_models: None, + create_conversations_model: str, +) -> None: + """Test that it can update a conversation_model from Typesense Server.""" + response = actual_conversations_models[create_conversations_model].update( + {"system_prompt": "This is a new system prompt"}, + ) + + assert_to_contain_keys( + response, + [ + "id", + "model_name", + "system_prompt", + "max_bytes", + "api_key", + "ttl", + "history_collection", + ], + ) + + assert response.get("system_prompt") == "This is a new system prompt" + assert response.get("id") == create_conversations_model + + +@pytest.mark.open_ai +def test_actual_delete( + actual_conversations_models: ConversationsModels, + delete_all_conversations_models: None, + create_conversations_model: str, +) -> None: + """Test that it can delete an conversation_model from Typesense Server.""" + response = 
actual_conversations_models[create_conversations_model].delete() + + assert_to_contain_keys( + response, + [ + "id", + "model_name", + "system_prompt", + "max_bytes", + "api_key", + "ttl", + "history_collection", + ], + ) + + assert response.get("system_prompt") == "This is a system prompt" + assert response.get("id") == create_conversations_model + assert response.get("id") == create_conversations_model From 8a84a1e4e3d33dfac7f0a18fac4843164435e715 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 30 Aug 2024 13:29:43 +0300 Subject: [PATCH 145/288] fix(types): make Locales type public --- src/typesense/types/collection.py | 4 ++-- src/typesense/types/synonym.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/typesense/types/collection.py b/src/typesense/types/collection.py index 830a2b0..cdb9317 100644 --- a/src/typesense/types/collection.py +++ b/src/typesense/types/collection.py @@ -32,7 +32,7 @@ _ReferenceFieldType = typing.Literal["string", "int32", "int64", "float"] -_Locales = typing.Literal["ja", "zh", "ko", "th", "el", "ru", "rs", "uk", "be", ""] +Locales = typing.Literal["ja", "zh", "ko", "th", "el", "ru", "rs", "uk", "be", ""] class CollectionFieldSchema(typing.Generic[_TType], typing.TypedDict, total=False): @@ -61,7 +61,7 @@ class CollectionFieldSchema(typing.Generic[_TType], typing.TypedDict, total=Fals optional: typing.NotRequired[bool] infix: typing.NotRequired[bool] stem: typing.NotRequired[bool] - locale: typing.NotRequired[_Locales] + locale: typing.NotRequired[Locales] sort: typing.NotRequired[bool] store: typing.NotRequired[bool] num_dim: typing.NotRequired[float] diff --git a/src/typesense/types/synonym.py b/src/typesense/types/synonym.py index 5024248..3675c42 100644 --- a/src/typesense/types/synonym.py +++ b/src/typesense/types/synonym.py @@ -2,7 +2,7 @@ import sys -from typesense.types.collection import _Locales +from typesense.types.collection import Locales if sys.version_info >= (3, 11): import 
typing @@ -26,8 +26,8 @@ class SynonymCreateSchema(typing.TypedDict): synonyms: typing.List[str] root: typing.NotRequired[str] - locale: typing.NotRequired[_Locales] symbols_to_index: typing.NotRequired[list[str]] + locale: typing.NotRequired[Locales] class SynonymSchema(SynonymCreateSchema): From 354590209b2c79f1464f06a02c0d79bdd6281236 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 30 Aug 2024 15:28:12 +0300 Subject: [PATCH 146/288] refactor: use typing module's native types instead of implict ones - Ensure compatibility with older python versions, as modules like the pipe operator for unions aren't supported in older python versions - This makes every typing definition compatible with python standards --- src/typesense/aliases.py | 9 +++- src/typesense/api_call.py | 46 ++++++++--------- src/typesense/collection.py | 5 +- src/typesense/collections.py | 8 +-- src/typesense/configuration.py | 16 +++--- src/typesense/exceptions.py | 15 ++++-- src/typesense/overrides.py | 11 +++- src/typesense/synonyms.py | 11 +++- src/typesense/types/collection.py | 12 +++-- src/typesense/types/override.py | 14 ++--- src/typesense/types/synonym.py | 4 +- tests/api_call_test.py | 85 +++++++++++++++++++++---------- tests/collections_test.py | 11 +++- 13 files changed, 159 insertions(+), 88 deletions(-) diff --git a/src/typesense/aliases.py b/src/typesense/aliases.py index c1a2926..dc94517 100644 --- a/src/typesense/aliases.py +++ b/src/typesense/aliases.py @@ -1,14 +1,21 @@ +import sys + from typesense.alias import Alias from typesense.api_call import ApiCall from typesense.types.alias import AliasCreateSchema, AliasesResponseSchema, AliasSchema +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + class Aliases: RESOURCE_PATH = "/aliases" def __init__(self, api_call: ApiCall): self.api_call = api_call - self.aliases: dict[str, Alias] = {} + self.aliases: typing.Dict[str, Alias] = {} def __getitem__(self, name: str) -> Alias: if 
not self.aliases.get(name): diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index 56c7541..fc1d872 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -63,8 +63,8 @@ from typesense.logger import logger session = requests.sessions.Session() -TParams = typing.TypeVar("TParams", bound=typing.Dict[str, typing.Any]) -TBody = typing.TypeVar("TBody", bound=typing.Dict[str, typing.Any]) +TParams = typing.TypeVar("TParams") +TBody = typing.TypeVar("TBody") TEntityDict = typing.TypeVar("TEntityDict") @@ -79,8 +79,8 @@ class SessionFunctionKwargs(typing.Generic[TParams, TBody], typing.TypedDict): verify (bool): Whether to verify """ - params: typing.NotRequired[TParams | None] - data: typing.NotRequired[TBody | str] + params: typing.NotRequired[typing.Union[TParams, None]] + data: typing.NotRequired[typing.Union[TBody, str, None]] timeout: float verify: bool @@ -175,7 +175,7 @@ def get_node(self) -> Node: return self.nodes[self.node_index] @staticmethod - def get_exception(http_code: int) -> type[TypesenseClientError]: + def get_exception(http_code: int) -> typing.Type[TypesenseClientError]: """ Return the exception class for a given HTTP status code. @@ -300,7 +300,7 @@ def make_request( entity_type: type[TEntityDict], as_json: bool, **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], - ) -> TEntityDict | str: + ) -> typing.Union[TEntityDict, str]: """ Use a session function to make a request to the endpoint with the given kwargs. @@ -450,7 +450,7 @@ def get( endpoint: str, entity_type: type[TEntityDict], as_json: typing.Literal[False], - params: TParams | None = None, + params: typing.Union[TParams, None] = None, ) -> str: """ Make a GET request to the endpoint with the given parameters. 
@@ -491,7 +491,7 @@ def get( endpoint: str, entity_type: type[TEntityDict], as_json: typing.Literal[True], - params: TParams | None = None, + params: typing.Union[TParams, None] = None, ) -> TEntityDict: """ Make a GET request to the endpoint with the given parameters. @@ -530,9 +530,9 @@ def get( self, endpoint: str, entity_type: type[TEntityDict], - as_json: typing.Literal[True] | typing.Literal[False] = True, - params: TParams | None = None, - ) -> TEntityDict | str: + as_json: typing.Union[typing.Literal[True], typing.Literal[False]] = True, + params: typing.Union[TParams, None] = None, + ) -> typing.Union[TEntityDict, str]: """ Make a GET request to the endpoint with the given parameters. @@ -580,9 +580,9 @@ def post( self, endpoint: str, entity_type: type[TEntityDict], - body: TBody, as_json: typing.Literal[False], - params: TParams | None = None, + params: typing.Union[TParams, None] = None, + body: typing.Union[TBody, None] = None, ) -> str: """ Make a POST request to the endpoint with the given parameters. @@ -623,9 +623,9 @@ def post( self, endpoint: str, entity_type: type[TEntityDict], - body: TBody, as_json: typing.Literal[True], - params: TParams | None = None, + params: typing.Union[TParams, None] = None, + body: typing.Union[TBody, None] = None, ) -> TEntityDict: """ Make a POST request to the endpoint with the given parameters. @@ -665,10 +665,10 @@ def post( self, endpoint: str, entity_type: type[TEntityDict], - body: TBody, - as_json: typing.Literal[True, False], - params: TParams | None = None, - ) -> str | TEntityDict: + as_json: typing.Union[typing.Literal[True], typing.Literal[False]] = True, + params: typing.Union[TParams, None] = None, + body: typing.Union[TBody, None] = None, + ) -> typing.Union[str, TEntityDict]: """ Make a POST request to the endpoint with the given parameters. 
@@ -709,8 +709,8 @@ def post( endpoint, entity_type, as_json=as_json, - params=params, data=body, + params=params, timeout=self.config.connection_timeout_seconds, verify=self.config.verify, ) @@ -720,7 +720,7 @@ def put( endpoint: str, entity_type: type[TEntityDict], body: TBody, - params: TParams | None = None, + params: typing.Union[TParams, None] = None, ) -> TEntityDict: """ Make a PUT request to the endpoint with the given parameters. @@ -770,7 +770,7 @@ def patch( endpoint: str, entity_type: type[TEntityDict], body: TBody, - params: TParams | None = None, + params: typing.Union[TParams , None ]= None, ) -> TEntityDict: """ Make a PATCH request to the endpoint with the given parameters. @@ -819,7 +819,7 @@ def delete( self, endpoint: str, entity_type: type[TEntityDict], - params: TParams | None = None, + params: typing.Union[TParams , None ]= None, ) -> TEntityDict: """ Make a DELETE request to the endpoint with the given parameters. diff --git a/src/typesense/collection.py b/src/typesense/collection.py index 695cfd1..c317ccf 100644 --- a/src/typesense/collection.py +++ b/src/typesense/collection.py @@ -44,7 +44,10 @@ def update(self, schema_change: CollectionUpdateSchema) -> CollectionUpdateSchem return response # There's currently no parameters passed to Collection deletions, but ensuring future compatibility - def delete(self, params: dict[str, str | bool] | None = None) -> CollectionSchema: + def delete( + self, + params: typing.Union[typing.Dict[str, typing.Union[str, bool]], None] = None, + ) -> CollectionSchema: return self.api_call.delete( self._endpoint_path, entity_type=CollectionSchema, params=params ) diff --git a/src/typesense/collections.py b/src/typesense/collections.py index 35a2b68..ec9dac9 100644 --- a/src/typesense/collections.py +++ b/src/typesense/collections.py @@ -20,7 +20,7 @@ class Collections(object): def __init__(self, api_call: ApiCall): self.api_call = api_call - self.collections: dict[str, Collection] = {} + self.collections: 
typing.Dict[str, Collection] = {} def __getitem__(self, collection_name: str) -> Collection: if not self.collections.get(collection_name): @@ -38,10 +38,10 @@ def create(self, schema: CollectionCreateSchema) -> CollectionSchema: ) return call - def retrieve(self) -> list[CollectionSchema]: - call: list[CollectionSchema] = self.api_call.get( + def retrieve(self) -> typing.List[CollectionSchema]: + call: typing.List[CollectionSchema] = self.api_call.get( endpoint=Collections.RESOURCE_PATH, as_json=True, - entity_type=list[CollectionSchema], + entity_type=typing.List[CollectionSchema], ) return call diff --git a/src/typesense/configuration.py b/src/typesense/configuration.py index f37ce09..6bff96f 100644 --- a/src/typesense/configuration.py +++ b/src/typesense/configuration.py @@ -44,7 +44,7 @@ class NodeConfigDict(typing.TypedDict): host: str port: int path: typing.NotRequired[str] - protocol: typing.Literal["http", "https"] | str + protocol: typing.Union[typing.Literal["http", "https"], str] class ConfigDict(typing.TypedDict): @@ -78,7 +78,7 @@ class ConfigDict(typing.TypedDict): dictionaries or URLs that represent the read replica nodes. 
""" - nodes: list[typing.Union[str, NodeConfigDict]] + nodes: typing.List[typing.Union[str, NodeConfigDict]] nearest_node: typing.NotRequired[typing.Union[str, NodeConfigDict]] api_key: str num_retries: typing.NotRequired[int] @@ -88,7 +88,7 @@ class ConfigDict(typing.TypedDict): timeout_seconds: typing.NotRequired[int] # deprecated master_node: typing.NotRequired[typing.Union[str, NodeConfigDict]] # deprecated read_replica_nodes: typing.NotRequired[ - list[typing.Union[str, NodeConfigDict]] + typing.List[typing.Union[str, NodeConfigDict]] ] # deprecated @@ -107,9 +107,9 @@ class Node: def __init__( self, host: str, - port: str | int, + port: typing.Union[str, int], path: str, - protocol: typing.Literal["http", "https"] | str, + protocol: typing.Union[typing.Literal["http", "https"], str], ) -> None: """ Initialize a Node object with the specified host, port, path, and protocol. @@ -194,7 +194,7 @@ def __init__( self.validations.show_deprecation_warnings(config_dict) self.validations.validate_config_dict(config_dict) - self.nodes: list[Node] = [ + self.nodes: typing.List[Node] = [ self._initialize_nodes(node) for node in config_dict["nodes"] ] @@ -294,7 +294,7 @@ def validate_required_config_fields(config_dict: ConfigDict) -> None: raise ConfigError("`api_key` is not defined.") @staticmethod - def validate_nodes(nodes: list[typing.Union[str, NodeConfigDict]]) -> None: + def validate_nodes(nodes: typing.List[typing.Union[str, NodeConfigDict]]) -> None: """ Validate the nodes in the configuration dictionary. @@ -339,7 +339,7 @@ def validate_nearest_node(nearest_node: typing.Union[str, NodeConfigDict]) -> No ) @staticmethod - def validate_node_fields(node: str | NodeConfigDict) -> bool: + def validate_node_fields(node: typing.Union[str, NodeConfigDict]) -> bool: """ Validate the fields of a node in the configuration dictionary. 
diff --git a/src/typesense/exceptions.py b/src/typesense/exceptions.py index de5085d..338228f 100644 --- a/src/typesense/exceptions.py +++ b/src/typesense/exceptions.py @@ -1,11 +1,20 @@ from __future__ import annotations -from typing import Any +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing class TypesenseClientError(IOError): - def __init__(self, *args: object, **kwargs: dict[Any, Any]) -> None: - super(TypesenseClientError, self).__init__(*args, **kwargs) + def __init__( + self, + *args: object, + **kwargs: typing.Dict[typing.Any, typing.Any], + ) -> None: + super().__init__(*args, **kwargs) class ConfigError(TypesenseClientError): diff --git a/src/typesense/overrides.py b/src/typesense/overrides.py index a6a7484..15259e3 100644 --- a/src/typesense/overrides.py +++ b/src/typesense/overrides.py @@ -1,5 +1,7 @@ from __future__ import annotations +import sys + from typesense.api_call import ApiCall from typesense.types.override import ( OverrideCreateSchema, @@ -9,6 +11,11 @@ from .override import Override +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + class Overrides(object): RESOURCE_PATH = 'overrides' @@ -20,7 +27,7 @@ def __init__( ) -> None: self.api_call = api_call self.collection_name = collection_name - self.overrides: dict[str, Override] = {} + self.overrides: typing.Dict[str, Override] = {} def __getitem__(self, override_id: str) -> Override: if not self.overrides.get(override_id): @@ -29,7 +36,7 @@ def __getitem__(self, override_id: str) -> Override: ) return self.overrides[override_id] - def _endpoint_path(self, override_id: str | None = None) -> str: + def _endpoint_path(self, override_id: typing.Union[str, None] = None) -> str: from .collections import Collections override_id = override_id or "" diff --git a/src/typesense/synonyms.py b/src/typesense/synonyms.py index 77b4bc6..8d7c6a3 100644 --- a/src/typesense/synonyms.py +++ 
b/src/typesense/synonyms.py @@ -1,3 +1,5 @@ +import sys + from typesense.api_call import ApiCall from typesense.types.synonym import ( SynonymCreateSchema, @@ -7,6 +9,11 @@ from .synonym import Synonym +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + class Synonyms(object): RESOURCE_PATH = 'synonyms' @@ -14,7 +21,7 @@ class Synonyms(object): def __init__(self, api_call: ApiCall, collection_name: str): self.api_call = api_call self.collection_name = collection_name - self.synonyms: dict[str, Synonym] = {} + self.synonyms: typing.Dict[str, Synonym] = {} def __getitem__(self, synonym_id: str) -> Synonym: if not self.synonyms.get(synonym_id): @@ -24,7 +31,7 @@ def __getitem__(self, synonym_id: str) -> Synonym: return self.synonyms[synonym_id] - def _endpoint_path(self, synonym_id: str | None = None) -> str: + def _endpoint_path(self, synonym_id: typing.Union[str, None] = None) -> str: from typesense.collections import Collections synonym_id = synonym_id or "" diff --git a/src/typesense/types/collection.py b/src/typesense/types/collection.py index cdb9317..b2366dc 100644 --- a/src/typesense/types/collection.py +++ b/src/typesense/types/collection.py @@ -67,7 +67,7 @@ class CollectionFieldSchema(typing.Generic[_TType], typing.TypedDict, total=Fals num_dim: typing.NotRequired[float] range_index: typing.NotRequired[bool] index: typing.NotRequired[bool] - vec_dist: typing.NotRequired[typing.Literal["cosine", "ip"] | str] + vec_dist: typing.NotRequired[typing.Union[typing.Literal["cosine", "ip"], str]] class RegularCollectionFieldSchema(CollectionFieldSchema[_FieldType]): @@ -149,10 +149,12 @@ class CollectionCreateSchema(typing.TypedDict): """ name: str - fields: list[RegularCollectionFieldSchema | ReferenceCollectionFieldSchema] + fields: typing.List[ + typing.Union[RegularCollectionFieldSchema, ReferenceCollectionFieldSchema] + ] default_sorting_field: typing.NotRequired[str] - symbols_to_index: typing.NotRequired[list[str]] - 
token_separators: typing.NotRequired[list[str]] + symbols_to_index: typing.NotRequired[typing.List[str]] + token_separators: typing.NotRequired[typing.List[str]] enable_nested_fields: typing.NotRequired[bool] voice_query_model: typing.NotRequired[VoiceQueryModelSchema] @@ -198,7 +200,7 @@ class CollectionUpdateSchema(typing.TypedDict): """ - fields: list[ + fields: typing.List[ typing.Union[ RegularCollectionFieldSchema, ReferenceCollectionFieldSchema, diff --git a/src/typesense/types/override.py b/src/typesense/types/override.py index 8009fb9..134716c 100644 --- a/src/typesense/types/override.py +++ b/src/typesense/types/override.py @@ -22,7 +22,7 @@ class OverrideQueryRuleSchema(typing.TypedDict): query: str match: typing.Literal["contains", "exact"] filter_by: typing.NotRequired[str] - tags: typing.NotRequired[list[str]] + tags: typing.NotRequired[typing.List[str]] class OverrideFilterSchema(typing.TypedDict): @@ -35,7 +35,7 @@ class OverrideFilterSchema(typing.TypedDict): """ filter_by: str - tags: typing.NotRequired[list[str]] + tags: typing.NotRequired[typing.List[str]] class IncludesSchema(typing.TypedDict): @@ -69,13 +69,13 @@ class OverrideCreateSchema(typing.TypedDict): stop_processing (bool): Whether to stop processing. 
""" - rule: OverrideQueryRuleSchema | OverrideFilterSchema + rule: typing.Union[OverrideQueryRuleSchema, OverrideFilterSchema] sort_by: typing.NotRequired[str] filter_by: typing.NotRequired[str] - excludes: typing.NotRequired[list[str]] + excludes: typing.NotRequired[typing.List[str]] replace_query: typing.NotRequired[str] - includes: typing.NotRequired[list[IncludesSchema]] - metadata: typing.NotRequired[dict[str, str]] + includes: typing.NotRequired[typing.List[IncludesSchema]] + metadata: typing.NotRequired[typing.Dict[str, str]] filter_curated_hits: typing.NotRequired[bool] effective_from_ts: typing.NotRequired[int] effective_to_ts: typing.NotRequired[int] @@ -97,4 +97,4 @@ class OverrideDeleteSchema(typing.TypedDict): class OverrideRetrieveSchema(typing.TypedDict): """The schema for the response of the Overrides.retrieve method.""" - overrides: list[OverrideSchema] + overrides: typing.List[OverrideSchema] diff --git a/src/typesense/types/synonym.py b/src/typesense/types/synonym.py index 3675c42..718df5c 100644 --- a/src/typesense/types/synonym.py +++ b/src/typesense/types/synonym.py @@ -26,8 +26,8 @@ class SynonymCreateSchema(typing.TypedDict): synonyms: typing.List[str] root: typing.NotRequired[str] - symbols_to_index: typing.NotRequired[list[str]] locale: typing.NotRequired[Locales] + symbols_to_index: typing.NotRequired[typing.List[str]] class SynonymSchema(SynonymCreateSchema): @@ -57,7 +57,7 @@ class SynonymsRetrieveSchema(typing.TypedDict): synonyms(list[SynonymSchema]): The list of synonyms. 
""" - synonyms: list[SynonymSchema] + synonyms: typing.List[SynonymSchema] class SynonymDeleteSchema(typing.TypedDict): diff --git a/tests/api_call_test.py b/tests/api_call_test.py index 481fe1b..c918509 100644 --- a/tests/api_call_test.py +++ b/tests/api_call_test.py @@ -2,8 +2,13 @@ from __future__ import annotations +import sys import time -from typing import Dict + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing import pytest import requests @@ -131,7 +136,7 @@ def test_get_exception() -> None: def test_normalize_params_with_booleans() -> None: """Test that it correctly normalizes boolean values to strings.""" - parameter_dict: Dict[str, str | bool] = {"key1": True, "key2": False} + parameter_dict: typing.Dict[str, str | bool] = {"key1": True, "key2": False} ApiCall.normalize_params(parameter_dict) assert parameter_dict == {"key1": "true", "key2": "false"} @@ -151,7 +156,7 @@ def test_normalize_params_with_mixed_types() -> None: def test_normalize_params_with_empty_dict() -> None: """Test that it correctly normalizes an empty dictionary.""" - parameter_dict: Dict[str, str] = {} + parameter_dict: typing.Dict[str, str] = {} ApiCall.normalize_params(parameter_dict) assert not parameter_dict @@ -175,7 +180,10 @@ def test_make_request_as_json(api_call: ApiCall) -> None: ) response = api_call.make_request( - session.get, "/test", as_json=True, entity_type=dict[str, str] + session.get, + "/test", + as_json=True, + entity_type=typing.Dict[str, str], ) assert response == {"key": "value"} @@ -192,7 +200,10 @@ def test_make_request_as_text(api_call: ApiCall) -> None: ) response = api_call.make_request( - session.get, "/test", as_json=False, entity_type=dict[str, str] + session.get, + "/test", + as_json=False, + entity_type=typing.Dict[str, str], ) assert response == "response text" @@ -207,9 +218,11 @@ def test_get_as_json( json={"key": "value"}, status_code=200, ) - assert api_call.get("/test", as_json=True, 
entity_type=dict[str, str]) == { - "key": "value" - } + assert api_call.get( + "/test", + as_json=True, + entity_type=typing.Dict[str, str], + ) == {"key": "value"} def test_get_as_text( @@ -223,7 +236,7 @@ def test_get_as_text( status_code=200, ) assert ( - api_call.get("/test", as_json=False, entity_type=dict[str, str]) + api_call.get("/test", as_json=False, entity_type=typing.Dict[str, str]) == "response text" ) @@ -239,7 +252,10 @@ def test_post_as_json( status_code=200, ) assert api_call.post( - "/test", body={"data": "value"}, as_json=True, entity_type=dict[str, str] + "/test", + body={"data": "value"}, + as_json=True, + entity_type=typing.Dict[str, str], ) == { "key": "value", } @@ -263,7 +279,7 @@ def test_post_with_params( params=parameter_set, body={"key": "value"}, as_json=True, - entity_type=dict[str, str], + entity_type=typing.Dict[str, str], ) expected_parameter_set = { @@ -292,7 +308,7 @@ def test_post_as_text( "/test", body={"data": "value"}, as_json=False, - entity_type=dict[str, str], + entity_type=typing.Dict[str, str], ) assert post_result == "response text" @@ -310,7 +326,7 @@ def test_put_as_json( assert api_call.put( "/test", body={"data": "value"}, - entity_type=dict[str, str], + entity_type=typing.Dict[str, str], ) == {"key": "value"} @@ -327,7 +343,7 @@ def test_patch_as_json( assert api_call.patch( "/test", body={"data": "value"}, - entity_type=dict[str, str], + entity_type=typing.Dict[str, str], ) == {"key": "value"} @@ -342,7 +358,7 @@ def test_delete_as_json( status_code=200, ) - response = api_call.delete("/test", entity_type=dict[str, str]) + response = api_call.delete("/test", entity_type=typing.Dict[str, str]) assert response == {"key": "value"} @@ -360,7 +376,10 @@ def test_raise_custom_exception_with_header( with pytest.raises(exceptions.RequestMalformed) as exception: api_call.make_request( - requests.get, "/test", as_json=True, entity_type=dict[str, str] + requests.get, + "/test", + as_json=True, + entity_type=typing.Dict[str, 
str], ) assert str(exception.value) == "[Errno 400] Test error" @@ -378,7 +397,10 @@ def test_raise_custom_exception_without_header( with pytest.raises(exceptions.RequestMalformed) as exception: api_call.make_request( - requests.get, "/test", as_json=True, entity_type=dict[str, str] + requests.get, + "/test", + as_json=True, + entity_type=typing.Dict[str, str], ) assert str(exception.value) == "[Errno 400] API error." @@ -403,7 +425,11 @@ def test_selects_next_available_node_on_timeout( status_code=200, ) - response = api_call.get("/test", as_json=True, entity_type=dict[str, str]) + response = api_call.get( + "/test", + as_json=True, + entity_type=typing.Dict[str, str], + ) assert response == {"key": "value"} assert request_mocker.request_history[0].url == "http://node0:8108/test" @@ -426,10 +452,10 @@ def test_raises_if_no_nodes_are_healthy_with_the_last_exception( request_mocker.get("http://node2:8108/", exc=requests.exceptions.SSLError) with pytest.raises(requests.exceptions.SSLError): - api_call.get('/', entity_type=dict[str, str]) + api_call.get("/", entity_type=typing.Dict[str, str]) -def test_uses_nearest_node_if_present_and_healthy( +def test_uses_nearest_node_if_present_and_healthy( # noqa: WPS213 mocker: MockerFixture, api_call: ApiCall, ) -> None: @@ -457,15 +483,18 @@ def test_uses_nearest_node_if_present_and_healthy( # 2 should go to node0, # 3 should go to node1, # 4 should go to node2 and resolve the request: 4 requests - api_call.get('/', entity_type=dict[str, str]) + api_call.get("/", entity_type=typing.Dict[str, str]) # 1 should go to node2 and resolve the request: 1 request - api_call.get('/', entity_type=dict[str, str]) + api_call.get("/", entity_type=typing.Dict[str, str]) # 1 should go to node2 and resolve the request: 1 request - api_call.get('/', entity_type=dict[str, str]) + api_call.get("/", entity_type=typing.Dict[str, str]) # Advance time by 5 seconds 
mocker.patch("time.time", return_value=current_time + 5) - api_call.get('/', entity_type=dict[str, str]) # 1 should go to node2 and resolve the request: 1 request + api_call.get( + "/", + entity_type=typing.Dict[str, str], + ) # 1 should go to node2 and resolve the request: 1 request # Advance time by 65 seconds mocker.patch("time.time", return_value=current_time + 65) @@ -474,7 +503,7 @@ def test_uses_nearest_node_if_present_and_healthy( # 2 should go to node0, # 3 should go to node1, # 4 should go to node2 and resolve the request: 4 requests - api_call.get('/', entity_type=dict[str, str]) + api_call.get("/", entity_type=typing.Dict[str, str]) # Advance time by 185 seconds mocker.patch("time.time", return_value=current_time + 185) @@ -487,11 +516,11 @@ def test_uses_nearest_node_if_present_and_healthy( ) # 1 should go to nearest and resolve the request: 1 request - api_call.get('/', entity_type=dict[str, str]) + api_call.get("/", entity_type=typing.Dict[str, str]) # 1 should go to nearest and resolve the request: 1 request - api_call.get('/', entity_type=dict[str, str]) + api_call.get("/", entity_type=typing.Dict[str, str]) # 1 should go to nearest and resolve the request: 1 request - api_call.get('/', entity_type=dict[str, str]) + api_call.get("/", entity_type=typing.Dict[str, str]) # Check the request history assert request_mocker.request_history[0].url == "http://nearest:8108/" diff --git a/tests/collections_test.py b/tests/collections_test.py index 18ea40d..b42e634 100644 --- a/tests/collections_test.py +++ b/tests/collections_test.py @@ -2,8 +2,15 @@ from __future__ import annotations +import sys + import requests_mock +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + from tests.utils.object_assertions import assert_match_object, assert_object_lists_match from typesense.api_call import ApiCall from typesense.collections import Collections @@ -53,7 +60,7 @@ def 
test_get_existing_collection(fake_collections: Collections) -> None: def test_retrieve(fake_collections: Collections) -> None: """Test that the Collections object can retrieve collections.""" - json_response: list[CollectionSchema] = [ + json_response: typing.List[CollectionSchema] = [ { "created_at": 1619711487, "default_sorting_field": "num_employees", @@ -238,7 +245,7 @@ def test_actual_retrieve( """Test that the Collections object can retrieve collections.""" response = actual_collections.retrieve() - expected: list[CollectionSchema] = [ + expected: typing.List[CollectionSchema] = [ { "default_sorting_field": "num_employees", "enable_nested_fields": False, From 9628540d45ff65829d2b9e398d198e5bc5103f3b Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 30 Aug 2024 15:30:07 +0300 Subject: [PATCH 147/288] fix(collection-test): remove created at expected value - The tests may take some time and in CI environments, this creates a flaky test, as it can break if more than a second passes --- tests/collection_test.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/collection_test.py b/tests/collection_test.py index 0dbaeb5..ad02fb1 100644 --- a/tests/collection_test.py +++ b/tests/collection_test.py @@ -181,7 +181,6 @@ def test_actual_retrieve( response = actual_collections["companies"].retrieve() expected: CollectionSchema = { - "created_at": int(time.time()), "default_sorting_field": "num_employees", "enable_nested_fields": False, "fields": [ @@ -214,6 +213,8 @@ def test_actual_retrieve( "token_separators": [], } + response.pop("created_at") + assert response == expected From faed5916dcfa34ef7488facb8eebe853b41822ba Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 30 Aug 2024 17:39:53 +0300 Subject: [PATCH 148/288] feat(debug): add types for debug class - Introduce `DebugResponseSchema` using `TypedDict` to define the response structure for the debug endpoint. 
- Include attributes `state` (int) and `version` (str) to capture the server's state and version. - Ensure compatibility with Python versions prior to 3.11 by conditionally importing `typing_extensions`. --- src/typesense/types/debug.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 src/typesense/types/debug.py diff --git a/src/typesense/types/debug.py b/src/typesense/types/debug.py new file mode 100644 index 0000000..4394896 --- /dev/null +++ b/src/typesense/types/debug.py @@ -0,0 +1,21 @@ +"""Types for the debug endpoint.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class DebugResponseSchema(typing.TypedDict): + """ + Response schema for the debug endpoint. + + Attributes: + state (int): The state of the Typesense server. + version (str): The version of the Typesense server. + """ + + state: int + version: str From 3ffed6bc706c8f794f39063f866329e6e900d3e8 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 30 Aug 2024 17:49:44 +0300 Subject: [PATCH 149/288] feat(test-utils): add fixtures for debug - Introduce `actual_debug` fixture to provide a Debug instance for real API interactions in tests. - Add `fake_debug` fixture for testing with mock values. 
--- tests/conftest.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index 8eb0531..9bece7a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -15,6 +15,7 @@ from typesense.configuration import Configuration from typesense.conversation_model import ConversationModel from typesense.conversations_models import ConversationsModels +from typesense.debug import Debug from typesense.document import Document from typesense.documents import Documents from typesense.key import Key @@ -408,6 +409,12 @@ def actual_documents_fixture(actual_api_call: ApiCall) -> Documents: return Documents(actual_api_call, "companies") +@pytest.fixture(scope="function", name="actual_debug") +def actual_debug_fixture(actual_api_call: ApiCall) -> Debug: + """Return a Debug object using a real API.""" + return Debug(actual_api_call) + + @pytest.fixture(scope="function", name="actual_multi_search") def actual_multi_search_fixture(actual_api_call: ApiCall) -> MultiSearch: """Return a MultiSearch object using a real API.""" @@ -626,6 +633,12 @@ def fake_document_fixture(fake_api_call: ApiCall) -> Document: return Document(fake_api_call, "companies", "0") +@pytest.fixture(scope="function", name="fake_debug") +def fake_debug_fixture(fake_api_call: ApiCall) -> Debug: + """Return a debug object with test values.""" + return Debug(fake_api_call) + + class Company(typing.TypedDict): """Company data type.""" From 51872699f77a3c4d049f28536dbbe4aa6895c09e Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 30 Aug 2024 17:40:51 +0300 Subject: [PATCH 150/288] feat(debug): add type hints to debug class Enhance `Debug` class with typed `retrieve` method - Update the `Debug` class to use `DebugResponseSchema` for the `retrieve` method, ensuring the response is correctly typed. - Add type annotations to the `__init__` method to specify the `ApiCall` dependency. 
- Improve method readability by adding the `as_json` and `entity_type` parameters to the `retrieve` method. --- src/typesense/debug.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/src/typesense/debug.py b/src/typesense/debug.py index e0e8128..cd1f8c6 100644 --- a/src/typesense/debug.py +++ b/src/typesense/debug.py @@ -1,9 +1,16 @@ +from typesense.api_call import ApiCall +from typesense.types.debug import DebugResponseSchema + + class Debug(object): - RESOURCE_PATH = '/debug' + RESOURCE_PATH = "/debug" - def __init__(self,api_call): + def __init__(self, api_call: ApiCall) -> None: self.api_call = api_call - self.collections = {} - def retrieve(self): - return self.api_call.get('{0}'.format(Debug.RESOURCE_PATH)) + def retrieve(self) -> DebugResponseSchema: + return self.api_call.get( + "{0}".format(Debug.RESOURCE_PATH), + as_json=True, + entity_type=DebugResponseSchema, + ) From fba56a8c35fcdd89661d949eae60e311e22bd433 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 30 Aug 2024 17:45:35 +0300 Subject: [PATCH 151/288] test(debug): add tests for debug module - Added unit tests to ensure the `Debug` class is initialized correctly. - Verified that the `retrieve()` method functions properly, both with mocked responses and actual server data. - Used assertions to check consistency in API call configurations and response handling. 
--- tests/debug_test.py | 52 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) create mode 100644 tests/debug_test.py diff --git a/tests/debug_test.py b/tests/debug_test.py new file mode 100644 index 0000000..942ec9a --- /dev/null +++ b/tests/debug_test.py @@ -0,0 +1,52 @@ +"""Tests for the Debug class.""" + +from __future__ import annotations + +import requests_mock + +from tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from typesense.api_call import ApiCall +from typesense.debug import Debug +from typesense.types.debug import DebugResponseSchema + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the Debug object is initialized correctly.""" + debug = Debug( + fake_api_call, + ) + + assert_match_object(debug.api_call, fake_api_call) + assert_object_lists_match(debug.api_call.nodes, fake_api_call.nodes) + assert_match_object( + debug.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert debug.RESOURCE_PATH == "/debug" # noqa: WPS437 + + +def test_retrieve(fake_debug: Debug) -> None: + """Test that the Debug object can retrieve a debug.""" + json_response: DebugResponseSchema = {"state": 1, "version": "27.0"} + + with requests_mock.Mocker() as mock: + mock.get( + "/debug", + json=json_response, + ) + + response = fake_debug.retrieve() + + assert len(mock.request_history) == 1 + assert mock.request_history[0].method == "GET" + assert mock.request_history[0].url == "http://nearest:8108/debug" + assert response == json_response + + +def test_actual_retrieve(actual_debug: Debug) -> None: + """Test that the Debug object can retrieve a debug on Typesense server.""" + json_response: DebugResponseSchema = {"state": 1, "version": "27.0"} + + response = actual_debug.retrieve() + + assert response == json_response From fb2fa0eecda69853251e1e4b7f9441f2c8e28594 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 30 Aug 
2024 17:48:39 +0300 Subject: [PATCH 152/288] test: add import tests for compatibility with python 3.11+ - Added tests to ensure proper importing of `typing` and `typing_extensions` modules. - Verified that `typing_extensions` is used for Python versions < 3.11. - Ensured that the `typing` module is correctly imported for Python 3.11 and later. - Dynamically tested imports across various Typesense modules for consistency in typing behavior. --- tests/import_test.py | 106 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 106 insertions(+) create mode 100644 tests/import_test.py diff --git a/tests/import_test.py b/tests/import_test.py new file mode 100644 index 0000000..f767aa0 --- /dev/null +++ b/tests/import_test.py @@ -0,0 +1,106 @@ +# mypy: disable-error-code="unreachable" +"""Test that the typing_extensions module is imported when Python version < 3.11.""" + +import importlib +import sys +from collections import namedtuple + +import pytest +from pytest_mock import MockFixture + +typing_module_names = [ + "alias", + "analytics_rule", + "collection", + "conversations_model", + "debug", + "document", + "key", + "multi_search", + "operations", + "override", + "stopword", + "synonym", +] + +module_names = [ + "aliases", + "analytics_rule", + "analytics_rules", + "api_call", + "client", + "collection", + "collections", + "configuration", + "conversations_models", + "document", + "documents", + "exceptions", + "keys", + "multi_search", + "overrides", + "operations", + "synonyms", + "preprocess", + "stopwords", +] + +# Create a namedtuple to mock sys.version_info +VersionInfo = namedtuple( + "VersionInfo", + ["major", "minor", "micro", "releaselevel", "serial"], +) + + +@pytest.mark.skipif( + sys.version_info < (3, 11), + reason="Test is only for Python < 3.11", +) +def test_import_typing(mocker: MockFixture) -> None: + """Test that the typing module is imported when Python version is 3.11 or higher.""" + mock_version_info = VersionInfo(3, 11, 0, "final", 0) + 
mocker.patch.object(sys, "version_info", mock_version_info) + + # Import modules dynamically and assign them to a list + modules = [importlib.import_module(f"typesense.{name}") for name in module_names] + typing_modules = [ + importlib.import_module(f"typesense.types.{name}") + for name in typing_module_names + ] + + for module in modules: + assert "typing" in module.__dict__ + assert module.typing == importlib.import_module("typing") + + for module in typing_modules: + assert "typing" in module.__dict__ + assert module.typing == importlib.import_module("typing") + + +def test_import_typing_extensions(mocker: MockFixture) -> None: + """Test that the typing_extensions module is imported when Python version < 3.11.""" + mock_version_info = VersionInfo(3, 10, 0, "final", 0) + mocker.patch.object(sys, "version_info", mock_version_info) + + # Import modules dynamically and assign them to a list + init_imports = [ + importlib.import_module(f"typesense.{name}") for name in module_names + ] + modules = [importlib.reload(import_module) for import_module in init_imports] + + init_typing_imports = [ + importlib.import_module(f"typesense.types.{name}") + for name in typing_module_names + ] + + typing_modules = [ + importlib.reload(import_module) for import_module in init_typing_imports + ] + + for module in modules: + assert "typing" in module.__dict__ + assert module.typing == importlib.import_module("typing_extensions") + + for module in typing_modules: + assert "typing" in module.__dict__ + assert module.typing == importlib.import_module("typing_extensions") From 5f46d827808b4ccbce1cf6f6efe054e6e8579763 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 30 Aug 2024 18:39:29 +0300 Subject: [PATCH 153/288] refactor: refactor collection and collections to support generic typing - Introduce `typing.Generic[TDoc]` to `Collection` and `Collections` classes, enhancing type safety by ensuring consistent usage of `DocumentSchema`. 
- Update the `documents` attribute in `Collection` to use the generic type `TDoc`, allowing for better type-checking and documentation. - Modify `__getitem__` method in `Collections` to return a `Collection[TDoc]`, improving the reliability of collection operations. These changes improve the robustness and maintainability of the codebase by leveraging Python's typing system for safer and clearer code. --- src/typesense/collection.py | 10 +++++++--- src/typesense/collections.py | 11 +++++++---- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/src/typesense/collection.py b/src/typesense/collection.py index c317ccf..9ba00f3 100644 --- a/src/typesense/collection.py +++ b/src/typesense/collection.py @@ -1,6 +1,7 @@ from __future__ import annotations import sys + from typesense.types.collection import CollectionSchema, CollectionUpdateSchema if sys.version_info >= (3, 11): @@ -9,17 +10,20 @@ import typing_extensions as typing from typesense.api_call import ApiCall +from typesense.types.document import DocumentSchema + +from .documents import Documents from .overrides import Overrides from .synonyms import Synonyms -from .documents import Documents +TDoc = typing.TypeVar("TDoc", bound=DocumentSchema) -class Collection(object): +class Collection(typing.Generic[TDoc]): def __init__(self, api_call: ApiCall, name: str): self.name = name self.api_call = api_call - self.documents = Documents(api_call, name) + self.documents = Documents[TDoc](api_call, name) self.overrides = Overrides(api_call, name) self.synonyms = Synonyms(api_call, name) diff --git a/src/typesense/collections.py b/src/typesense/collections.py index ec9dac9..2f12c91 100644 --- a/src/typesense/collections.py +++ b/src/typesense/collections.py @@ -11,18 +11,21 @@ from typesense.api_call import ApiCall from typesense.types.collection import CollectionCreateSchema, CollectionSchema +from typesense.types.document import DocumentSchema from .collection import Collection +TDoc = typing.TypeVar("TDoc", 
bound=DocumentSchema) -class Collections(object): - RESOURCE_PATH = '/collections' + +class Collections(typing.Generic[TDoc]): + RESOURCE_PATH = "/collections" def __init__(self, api_call: ApiCall): self.api_call = api_call - self.collections: typing.Dict[str, Collection] = {} + self.collections: typing.Dict[str, Collection[TDoc]] = {} - def __getitem__(self, collection_name: str) -> Collection: + def __getitem__(self, collection_name: str) -> Collection[TDoc]: if not self.collections.get(collection_name): self.collections[collection_name] = Collection( self.api_call, collection_name From 1f227e3568a9eaff54ce976e37ecf5198d396bd2 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 30 Aug 2024 18:40:40 +0300 Subject: [PATCH 154/288] test(api_call): add tests for API call parameter normalization and node selection - Implement tests for `ApiCall.normalize_params` to ensure correct handling of non-dictionary inputs and mixed data types, improving input validation and robustness. - Add tests for node selection logic in `ApiCall`: - Verify behavior when no healthy nodes are available, ensuring proper logging and fallback node selection. - Ensure exception is raised when no nodes are healthy during a request. - Add a docstring to `tests/__init__.py` to clarify the purpose of the tests module. These tests enhance coverage and ensure stability in key areas of the API call logic. 
--- tests/api_call_test.py | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/tests/api_call_test.py b/tests/api_call_test.py index c918509..63a6b3a 100644 --- a/tests/api_call_test.py +++ b/tests/api_call_test.py @@ -2,9 +2,12 @@ from __future__ import annotations +import logging import sys import time +from pytest_mock import MockFixture + if sys.version_info >= (3, 11): import typing else: @@ -19,6 +22,7 @@ from typesense import exceptions from typesense.api_call import ApiCall from typesense.configuration import Configuration, Node +from typesense.logger import logger @pytest.fixture(scope="function", name="config") @@ -142,6 +146,14 @@ def test_normalize_params_with_booleans() -> None: assert parameter_dict == {"key1": "true", "key2": "false"} +def test_normalize_params_with_non_dict() -> None: + """Test that it raises when a non-dictionary is passed.""" + parameter_non_dict = "string" + + with pytest.raises(ValueError): + ApiCall.normalize_params(parameter_non_dict) + + def test_normalize_params_with_mixed_types() -> None: """Test that it correctly normalizes boolean values to strings.""" parameter_dict = {"key1": True, "key2": False, "key3": "value", "key4": 123} @@ -438,6 +450,32 @@ def test_selects_next_available_node_on_timeout( assert request_mocker.call_count == 3 +def test_get_node_no_healthy_nodes( + api_call: ApiCall, + mocker: MockFixture, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that it logs a message if no healthy nodes are found.""" + for api_node in api_call.nodes: + api_node.healthy = False + + api_call.config.nearest_node.healthy = False + + mocker.patch.object(api_call, "node_due_for_health_check", return_value=False) + + # Need to set the logger level to DEBUG to capture the message + logger.setLevel(logging.DEBUG) + + selected_node = api_call.get_node() + + with caplog.at_level(logging.DEBUG): + assert "No healthy nodes were found. Returning the next node." 
in caplog.text + + assert selected_node == api_call.nodes[api_call.node_index] + + assert api_call.node_index == 0 + + def test_raises_if_no_nodes_are_healthy_with_the_last_exception( api_call: ApiCall, ) -> None: From 89847df1d663f95f72ec4734d6dc82612877c12a Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 30 Aug 2024 18:41:09 +0300 Subject: [PATCH 155/288] fix(collection-tests): fix collection tests to adhere with v27 changes - v27 introduces changes to response schemas for collections, this commit introduces changes to match the response from Typesense server. - Add the `store` parameter to expected responses - Remove unused parameters from update responses --- tests/collection_test.py | 9 ++------- tests/collections_test.py | 4 ++++ 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/tests/collection_test.py b/tests/collection_test.py index ad02fb1..0d0ef00 100644 --- a/tests/collection_test.py +++ b/tests/collection_test.py @@ -194,6 +194,7 @@ def test_actual_retrieve( "sort": False, "infix": False, "stem": False, + "store": True, }, { "name": "num_employees", @@ -205,6 +206,7 @@ def test_actual_retrieve( "sort": True, "infix": False, "stem": False, + "store": True, }, ], "name": "companies", @@ -233,13 +235,6 @@ def test_actual_update( { "name": "num_locations", "type": "int32", - "facet": False, - "index": True, - "optional": False, - "locale": "", - "sort": True, - "infix": False, - "stem": False, }, ], } diff --git a/tests/collections_test.py b/tests/collections_test.py index b42e634..b345740 100644 --- a/tests/collections_test.py +++ b/tests/collections_test.py @@ -196,6 +196,7 @@ def test_actual_create(actual_collections: Collections, delete_all: None) -> Non "sort": False, "infix": False, "stem": False, + "store": True, }, { "name": "num_employees", @@ -207,6 +208,7 @@ def test_actual_create(actual_collections: Collections, delete_all: None) -> Non "sort": False, "infix": False, "stem": False, + "store": True, }, ], "name": 
"companies", @@ -260,6 +262,7 @@ def test_actual_retrieve( "sort": False, "infix": False, "stem": False, + "store": True, }, { "name": "num_employees", @@ -271,6 +274,7 @@ def test_actual_retrieve( "sort": True, "infix": False, "stem": False, + "store": True, }, ], "name": "companies", From c24adf6a588cef19708c450281ebbe4ad1096ccf Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 30 Aug 2024 19:07:28 +0300 Subject: [PATCH 156/288] style: update styling to use double quotes --- src/typesense/overrides.py | 2 +- src/typesense/synonyms.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/typesense/overrides.py b/src/typesense/overrides.py index 15259e3..fdbfc50 100644 --- a/src/typesense/overrides.py +++ b/src/typesense/overrides.py @@ -18,7 +18,7 @@ class Overrides(object): - RESOURCE_PATH = 'overrides' + RESOURCE_PATH = "overrides" def __init__( self, diff --git a/src/typesense/synonyms.py b/src/typesense/synonyms.py index 8d7c6a3..0bc3a5a 100644 --- a/src/typesense/synonyms.py +++ b/src/typesense/synonyms.py @@ -16,7 +16,7 @@ class Synonyms(object): - RESOURCE_PATH = 'synonyms' + RESOURCE_PATH = "synonyms" def __init__(self, api_call: ApiCall, collection_name: str): self.api_call = api_call From 2b5d8c29bc72c50929ad54abd75fd77450ae5c5b Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 30 Aug 2024 19:08:16 +0300 Subject: [PATCH 157/288] feat(validation): add type hints to validation functions - Update the `validate_search` function to explicitly define its parameter type as `typing.Mapping[str, str]`, ensuring better type safety and clarity. - Improve error handling by refining the `InvalidParameter` exception message, making it more informative by indicating the expected type and the actual type provided. These changes increase robustness and readability in parameter validation for search operations. 
--- src/typesense/validation.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/src/typesense/validation.py b/src/typesense/validation.py index 96d2063..8dc8310 100644 --- a/src/typesense/validation.py +++ b/src/typesense/validation.py @@ -1,7 +1,18 @@ +from __future__ import annotations + +import sys + +if sys.version_info > (3, 11): + import typing +else: + import typing_extensions as typing + from typesense.exceptions import InvalidParameter -def validate_search(params): +def validate_search(params: typing.Mapping[str, str]) -> None: for key in params: if not isinstance(params[key], str): - raise InvalidParameter(f"'{key}' field expected a string but was given {type(params[key]).__name__}") + raise InvalidParameter( + f"'{key}' field expected a string but was given {type(params[key]).__name__}" + ) From 9c84c6c77c3b137ed676b699c91a864adedbb30a Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 30 Aug 2024 19:10:03 +0300 Subject: [PATCH 158/288] fix: remove unused utils on node tests --- tests/node_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/node_test.py b/tests/node_test.py index 2cfac4f..23ab20d 100644 --- a/tests/node_test.py +++ b/tests/node_test.py @@ -4,7 +4,7 @@ import pytest -from tests.utils.object_assertions import assert_match_object, assert_to_contain_object +from tests.utils.object_assertions import assert_match_object from typesense.configuration import Node from typesense.exceptions import ConfigError From 43fe9766734a95ec91d6c51e253efbf5e1488bb2 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 9 Sep 2024 12:08:54 +0300 Subject: [PATCH 159/288] fix(override): remove unused modules in override class --- src/typesense/override.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/typesense/override.py b/src/typesense/override.py index ba2a47d..053ef5e 100644 --- a/src/typesense/override.py +++ b/src/typesense/override.py @@ -1,7 +1,3 @@ -from __future__ 
import annotations - -import sys - from typesense.api_call import ApiCall from typesense.types.override import OverrideDeleteSchema, OverrideSchema From 165a899fce39125d15493f40a0283376d44f1045 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 9 Sep 2024 12:10:01 +0300 Subject: [PATCH 160/288] fix(api-call): runtime check for api request params type - Throw if the parameters passed aren't in dict type --- src/typesense/api_call.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index fc1d872..43475a8 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -438,6 +438,8 @@ def normalize_params(params: TParams) -> None: Args: params (TParams): The request parameters. """ + if not isinstance(params, typing.Dict): + raise ValueError("Params must be a dictionary.") for key in params.keys(): if isinstance(params[key], bool) and params[key]: params[key] = "true" From e924474ddaebbe4153bbf5ddfd54acc60fa687bc Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 9 Sep 2024 12:10:50 +0300 Subject: [PATCH 161/288] fix(api-call): use literals for bool overloads - Ensure that mypy properly understands the possible values of bool parameters for overloads --- src/typesense/api_call.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index 43475a8..9854406 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -298,7 +298,7 @@ def make_request( fn: typing.Callable[..., requests.models.Response], endpoint: str, entity_type: type[TEntityDict], - as_json: bool, + as_json: typing.Union[typing.Literal[False], typing.Literal[True]], **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], ) -> typing.Union[TEntityDict, str]: """ From 23b77bb37f59c9abf97e052e452151d2f9747c37 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 9 Sep 2024 15:27:21 +0300 Subject: [PATCH 162/288] 
refactor(pytest): refactor test fixtures into separate files This commit reorganizes the test fixtures by moving them from a single conftest.py file into individual files within a new 'fixtures' directory. Each fixture file is now dedicated to a specific component or functionality, improving modularity and maintainability of the test suite. Key changes: - Create new 'fixtures' directory under 'tests' - Split fixtures into separate files based on functionality - Update conftest.py to use pytest_plugins for fixture discovery - Remove duplicate fixture definitions - Improve import organization and remove unused imports --- tests/conftest.py | 663 +----------------- tests/document_test.py | 6 +- tests/documents_test.py | 66 +- tests/fixtures/alias_fixtures.py | 64 ++ tests/fixtures/analytics_rule_fixtures.py | 96 +++ tests/fixtures/api_call_fixtures.py | 20 + tests/fixtures/client_fixtures.py | 20 + tests/fixtures/collections_fixtures.py | 93 +++ tests/fixtures/configuration_fixtures.py | 71 ++ tests/fixtures/conversation_model_fixtures.py | 107 +++ tests/fixtures/debug_fixtures.py | 18 + tests/fixtures/document_fixtures.py | 77 ++ tests/fixtures/key_fixtures.py | 63 ++ tests/fixtures/multi_search_fixtures.py | 12 + tests/fixtures/operation_fixtures.py | 18 + tests/fixtures/override_fixtures.py | 40 ++ tests/fixtures/stopword_fixtures.py | 69 ++ tests/fixtures/synonym_fixtures.py | 44 ++ tests/multi_search_test.py | 6 +- 19 files changed, 857 insertions(+), 696 deletions(-) create mode 100644 tests/fixtures/alias_fixtures.py create mode 100644 tests/fixtures/analytics_rule_fixtures.py create mode 100644 tests/fixtures/api_call_fixtures.py create mode 100644 tests/fixtures/client_fixtures.py create mode 100644 tests/fixtures/collections_fixtures.py create mode 100644 tests/fixtures/configuration_fixtures.py create mode 100644 tests/fixtures/conversation_model_fixtures.py create mode 100644 tests/fixtures/debug_fixtures.py create mode 100644 
tests/fixtures/document_fixtures.py create mode 100644 tests/fixtures/key_fixtures.py create mode 100644 tests/fixtures/multi_search_fixtures.py create mode 100644 tests/fixtures/operation_fixtures.py create mode 100644 tests/fixtures/override_fixtures.py create mode 100644 tests/fixtures/stopword_fixtures.py create mode 100644 tests/fixtures/synonym_fixtures.py diff --git a/tests/conftest.py b/tests/conftest.py index 9bece7a..595a742 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,663 +1,12 @@ """Pytest configuration file.""" -import pytest -import requests -from faker import Faker -from faker.providers import company +from glob import glob -from typesense.alias import Alias -from typesense.aliases import Aliases -from typesense.analytics_rule import AnalyticsRule -from typesense.analytics_rules import AnalyticsRules -from typesense.api_call import ApiCall -from typesense.collection import Collection -from typesense.collections import Collections -from typesense.configuration import Configuration -from typesense.conversation_model import ConversationModel -from typesense.conversations_models import ConversationsModels -from typesense.debug import Debug -from typesense.document import Document -from typesense.documents import Documents -from typesense.key import Key -from typesense.keys import Keys -from typesense.multi_search import MultiSearch -from typesense.operations import Operations -from typesense.override import Override -from typesense.overrides import Overrides -from typesense.stopwords import Stopwords -from typesense.stopwords_set import StopwordsSet -from typesense.synonym import Synonym -from typesense.synonyms import Synonyms +import pytest pytest.register_assert_rewrite("utils.object_assertions") -fake = Faker() -fake.add_provider(company) - - -@pytest.fixture(scope="function", name="delete_all") -def clear_typesense_collections() -> None: - """Remove all collections from the Typesense server.""" - url = 
"http://localhost:8108/collections" - headers = {"X-TYPESENSE-API-KEY": "xyz"} - - # Get the list of collections - response = requests.get(url, headers=headers) - response.raise_for_status() - collections = response.json() - - # Delete each collection - for collection in collections: - collection_name = collection["name"] - delete_url = f"{url}/{collection_name}" - delete_response = requests.delete(delete_url, headers=headers) - delete_response.raise_for_status() - - -@pytest.fixture(scope="function", name="create_collection") -def create_collection_fixture() -> None: - """Create a collection in the Typesense server.""" - url = "http://localhost:8108/collections" - headers = {"X-TYPESENSE-API-KEY": "xyz"} - data = { - "name": "companies", - "fields": [ - { - "name": "company_name", - "type": "string", - }, - { - "name": "num_employees", - "type": "int32", - }, - ], - "default_sorting_field": "num_employees", - } - - response = requests.post(url, headers=headers, json=data) -@pytest.fixture(scope="function", name="create_document") -def create_document_fixture() -> None: - """Create a document in the Typesense server.""" - url = "http://localhost:8108/collections/companies/documents" - headers = {"X-TYPESENSE-API-KEY": "xyz"} - document_data = { - "id": "0", - "company_name": "Company", - "num_employees": 10, - } - - response = requests.post(url, headers=headers, json=document_data, timeout=3) - response.raise_for_status() - - -@pytest.fixture(scope="function", name="create_conversation_history_collection") -def create_conversation_history_collection_fixture() -> None: - """Create a collection for conversation history in the Typesense server.""" - url = "http://localhost:8108/collections" - delete_url = 
"http://localhost:8108/collections/conversation_store" - headers = {"X-TYPESENSE-API-KEY": "xyz"} - collection_data = { - "name": "conversation_store", - "fields": [ - {"name": "conversation_id", "type": "string"}, - {"name": "model_id", "type": "string"}, - {"name": "timestamp", "type": "int32"}, - {"name": "role", "type": "string", "index": False}, - {"name": "message", "type": "string", "index": False}, - ], - } - - delete_response = requests.delete(delete_url, headers=headers, timeout=3) - if delete_response.status_code not in {200, 404}: - delete_response.raise_for_status() - response = requests.post(url, headers=headers, json=collection_data, timeout=3) - response.raise_for_status() - - -@pytest.fixture(scope="function", name="create_conversations_model") -def create_conversations_model_fixture( - create_conversation_history_collection: None, -) -> str: - """Create a conversations model in the Typesense server.""" - url = "http://localhost:8108/conversations/models" - headers = {"X-TYPESENSE-API-KEY": "xyz"} - conversations_model_data = { - "api_key": os.environ["OPEN_AI_KEY"], - "max_bytes": 16384, - "model_name": "openai/gpt-3.5-turbo", - "history_collection": "conversation_store", - "system_prompt": "This is a system prompt", - } - - response = requests.post( - url, - headers=headers, - json=conversations_model_data, - timeout=3, - ) - - response.raise_for_status() - - conversation_model_id: str = response.json()["id"] - return conversation_model_id - - -@pytest.fixture(scope="function", name="create_stopword") -def create_stopword_fixture() -> None: - """Create a stopword set in the Typesense server.""" - url = "http://localhost:8108/stopwords/company_stopwords" - headers = {"X-TYPESENSE-API-KEY": "xyz"} - stopword_data = { - "stopwords": ["and", "is", "the"], - } - - response = requests.put(url, headers=headers, 
json=stopword_data, timeout=3) - response.raise_for_status() - - -@pytest.fixture(scope="function", name="create_key_id") -def create_key_fixture() -> int: - """Create a key set in the Typesense server.""" - url = "http://localhost:8108/keys" - headers = {"X-TYPESENSE-API-KEY": "xyz"} - api_key_data = { - "actions": ["documents:search"], - "collections": ["companies"], - "description": "Search-only key", - } - - response = requests.post(url, headers=headers, json=api_key_data, timeout=3) - response.raise_for_status() - key_id: int = response.json()["id"] - return key_id - - -@pytest.fixture(scope="function", name="delete_all_aliases") -def clear_typesense_aliases() -> None: - """Remove all aliases from the Typesense server.""" - url = "http://localhost:8108/aliases" - headers = {"X-TYPESENSE-API-KEY": "xyz"} - - # Get the list of collections - response = requests.get(url, headers=headers) - response.raise_for_status() - - aliases = response.json() - - # Delete each alias - for alias in aliases["aliases"]: - alias_name = alias.get("name") - delete_url = f"{url}/{alias_name}" - delete_response = requests.delete(delete_url, headers=headers) -@pytest.fixture(scope="function", name="delete_all_conversations_models") -def clear_typesense_conversations_models() -> None: - """Remove all conversations_models from the Typesense server.""" - url = "http://localhost:8108/conversations/models" - headers = {"X-TYPESENSE-API-KEY": "xyz"} - - # Get the list of collections - response = requests.get(url, headers=headers, timeout=3) - response.raise_for_status() - - conversations_models = response.json() - - # Delete each alias - for conversation_model in conversations_models: - conversation_model_id = conversation_model.get("id") - delete_url = f"{url}/{conversation_model_id}" - delete_response = requests.delete(delete_url, headers=headers, timeout=3) - 
delete_response.raise_for_status() - - -@pytest.fixture(scope="function", name="delete_all_stopwords") -def clear_typesense_stopwords() -> None: - """Remove all stopwords from the Typesense server.""" - url = "http://localhost:8108/stopwords" - headers = {"X-TYPESENSE-API-KEY": "xyz"} - - # Get the list of collections - response = requests.get(url, headers=headers, timeout=3) - response.raise_for_status() - stopwords = response.json() - - # Delete each stopword - for stopword_set in stopwords["stopwords"]: - stopword_id = stopword_set.get("id") - delete_url = f"{url}/{stopword_id}" - delete_response = requests.delete(delete_url, headers=headers, timeout=3) - delete_response.raise_for_status() - - -@pytest.fixture(scope="function", name="delete_all_keys") -def clear_typesense_keys() -> None: - """Remove all keys from the Typesense server.""" - url = "http://localhost:8108/keys" - headers = {"X-TYPESENSE-API-KEY": "xyz"} - - # Get the list of collections - response = requests.get(url, headers=headers, timeout=3) - response.raise_for_status() - - keys = response.json() - - # Delete each key - for key in keys["keys"]: - key_name = key.get("id") - delete_url = f"{url}/{key_name}" - delete_response = requests.delete(delete_url, headers=headers, timeout=3) - delete_response.raise_for_status() - - -@pytest.fixture(scope="function", name="delete_all_analytics_rules") -def clear_typesense_analytics_rules() -> None: - """Remove all analytics_rules from the Typesense server.""" - url = "http://localhost:8108/analytics/rules" - headers = {"X-TYPESENSE-API-KEY": "xyz"} - - # Get the list of collections - response = requests.get(url, headers=headers, timeout=3) - response.raise_for_status() - analytics_rules = response.json() - - # Delete each analytics_rule - for analytics_rule_set in analytics_rules["rules"]: - analytics_rule_id = 
analytics_rule_set.get("name") - delete_url = f"{url}/{analytics_rule_id}" - delete_response = requests.delete(delete_url, headers=headers, timeout=3) - delete_response.raise_for_status() - - -@pytest.fixture(scope="function", name="create_another_collection") -def create_another_collection_fixture() -> None: - """Create a collection in the Typesense server.""" - url = "http://localhost:8108/collections" - headers = {"X-TYPESENSE-API-KEY": "xyz"} - data = { - "name": "companies_2", - "fields": [ - { - "name": "company_name", - "type": "string", - }, - { - "name": "num_employees", - "type": "int32", - }, - ], - "default_sorting_field": "num_employees", - } - - response = requests.post(url, headers=headers, json=data) - -@pytest.fixture(scope="function", name="create_query_collection") -def create_query_collection_fixture() -> None: - """Create a collection in the Typesense server.""" - url = "http://localhost:8108/collections" - headers = {"X-TYPESENSE-API-KEY": "xyz"} - query_collection_data = { - "name": "companies_queries", - "fields": [ - { - "name": "q", - "type": "string", - }, - { - "name": "count", - "type": "int32", - }, - ], - } - - response = requests.post( - url, - headers=headers, - json=query_collection_data, - timeout=3, - ) - response.raise_for_status() - - -@pytest.fixture(scope="function", name="create_analytics_rule") -def create_analytics_rule_fixture( - create_collection: None, - create_query_collection: None, -) -> None: - """Create a collection in the Typesense server.""" - url = "http://localhost:8108/analytics/rules" - headers = {"X-TYPESENSE-API-KEY": "xyz"} - analytics_rule_data = { - "name": "company_analytics_rule", - "type": "nohits_queries", - "params": { - "source": { - "collections": ["companies"], - }, - "destination": {"collection": "companies_queries"}, - }, - } - - response = requests.post(url, 
headers=headers, json=analytics_rule_data, timeout=3) - response.raise_for_status() - - -@pytest.fixture(scope="function", name="create_override") -def create_override_fixture(create_collection: None) -> None: - url = "http://localhost:8108/collections/companies/overrides/company_override" - headers = {"X-TYPESENSE-API-KEY": "xyz"} - data = { - "rule": {"match": "exact", "query": "companies"}, - "filter_by": "num_employees>10", - } - - response = requests.put(url, headers=headers, json=data) - response.raise_for_status() - - -@pytest.fixture(scope="function", name="create_synonym") -def create_synonym_fixture(create_collection: None) -> None: - url = "http://localhost:8108/collections/companies/synonyms/company_synonym" - headers = {"X-TYPESENSE-API-KEY": "xyz"} - data = { - "synonyms": ["companies", "corporations", "firms"], - } - - response = requests.put(url, headers=headers, json=data) - response.raise_for_status() - - -@pytest.fixture(scope="function", name="create_alias") -def create_alias_fixture(create_collection: None) -> None: - url = "http://localhost:8108/aliases/company_alias" - headers = {"X-TYPESENSE-API-KEY": "xyz"} - data = { - "collection_name": "companies", - } - - response = requests.put(url, headers=headers, json=data) - response.raise_for_status() - - -@pytest.fixture(scope="function", name="actual_config") -def actual_config_fixture() -> Configuration: - return Configuration( - config_dict={ - "api_key": "xyz", - "nodes": [ - { - "host": "localhost", - "port": 8108, - "protocol": "http", - } - ], - } - ) - - -@pytest.fixture(scope="function", name="actual_api_call") -def actual_api_call_fixture(actual_config: Configuration) -> ApiCall: - return ApiCall(actual_config) - - -@pytest.fixture(scope="function", name="actual_collections") -def actual_collections_fixture(actual_api_call: ApiCall) -> Collections: - return 
Collections(actual_api_call) - - -@pytest.fixture(scope="function", name="actual_documents") -def actual_documents_fixture(actual_api_call: ApiCall) -> Documents: - """Return a Documents object using a real API.""" - return Documents(actual_api_call, "companies") - - -@pytest.fixture(scope="function", name="actual_debug") -def actual_debug_fixture(actual_api_call: ApiCall) -> Debug: - """Return a Debug object using a real API.""" - return Debug(actual_api_call) - - -@pytest.fixture(scope="function", name="actual_multi_search") -def actual_multi_search_fixture(actual_api_call: ApiCall) -> MultiSearch: - """Return a MultiSearch object using a real API.""" - return MultiSearch(actual_api_call) - - -@pytest.fixture(scope="function", name="actual_overrides") -def actual_overrides_fixture(actual_api_call: ApiCall) -> Overrides: - return Overrides(actual_api_call, "companies") - - -@pytest.fixture(scope="function", name="actual_conversations_models") -def actual_conversations_models_fixture( - actual_api_call: ApiCall, -) -> ConversationsModels: - """Return a ConversationsModels object using a real API.""" - return ConversationsModels(actual_api_call) - - -@pytest.fixture(scope="function", name="actual_synonyms") -def actual_synonyms_fixture(actual_api_call: ApiCall) -> Synonyms: - return Synonyms(actual_api_call, "companies") - - -@pytest.fixture(scope="function", name="actual_aliases") -def actual_aliases_fixture(actual_api_call: ApiCall) -> Aliases: - return Aliases(actual_api_call) - - -@pytest.fixture(scope="function", name="actual_stopwords") -def actual_stopwords_fixture(actual_api_call: ApiCall) -> Stopwords: - """Return a Stopwords object using a real API.""" - return Stopwords(actual_api_call) - - -@pytest.fixture(scope="function", name="actual_stopwords_set") -def actual_stopwords_set_fixture(actual_api_call: ApiCall) -> StopwordsSet: - """Return a Stopwords object using a real API.""" - return StopwordsSet(actual_api_call, "company_stopwords") - - 
-@pytest.fixture(scope="function", name="actual_operations") -def actual_operations_fixture(actual_api_call: ApiCall) -> Operations: - """Return a Operations object using a real API.""" - return Operations(actual_api_call) - - -@pytest.fixture(scope="function", name="fake_config") -def fake_config_fixture() -> Configuration: - """Return a Configuration object with test values.""" - return Configuration( - config_dict={ - "api_key": "test-api-key", - "nodes": [ - { - "host": "node0", - "port": 8108, - "protocol": "http", - }, - { - "host": "node1", - "port": 8108, - "protocol": "http", - }, - { - "host": "node2", - "port": 8108, - "protocol": "http", - }, - ], - "nearest_node": { - "host": "nearest", - "port": 8108, - "protocol": "http", - }, - "num_retries": 3, - "healthcheck_interval_seconds": 60, - "retry_interval_seconds": 0.001, - "connection_timeout_seconds": 0.001, - "verify": True, - }, - ) - - -@pytest.fixture(scope="function", name="fake_api_call") -def fake_api_call_fixture( - fake_config: Configuration, -) -> ApiCall: - """Return an ApiCall object with test values.""" - return ApiCall(fake_config) - - -@pytest.fixture(scope="function", name="fake_collections") -def fake_collections_fixture(fake_api_call: ApiCall) -> Collections: - """Return a Collection object with test values.""" - return Collections(fake_api_call) - - -@pytest.fixture(scope="function", name="fake_analytics_rules") -def fake_analytics_rules_fixture(fake_api_call: ApiCall) -> AnalyticsRules: - """Return a AnalyticsRule object with test values.""" - return AnalyticsRules(fake_api_call) - - -@pytest.fixture(scope="function", name="fake_collection") -def fake_collection_fixture(fake_api_call: ApiCall) -> Collection: - """Return a Collection object with test values.""" - return Collection(fake_api_call, "companies") - - -@pytest.fixture(scope="function", name="fake_overrides") -def fake_overrides_fixture(fake_api_call: ApiCall) -> Overrides: - """Return a Collection object with test 
values.""" - return Overrides(fake_api_call, "companies") - - -@pytest.fixture(scope="function", name="fake_conversations_models") -def fake_conversations_models_fixture(fake_api_call: ApiCall) -> ConversationsModels: - """Return a Collection object with test values.""" - return ConversationsModels(fake_api_call) - - -@pytest.fixture(scope="function", name="fake_conversation_model") -def fake_conversation_model_fixture(fake_api_call: ApiCall) -> ConversationModel: - """Return a ConversationModel object with test values.""" - return ConversationModel(fake_api_call, "conversation_model_id") - - -@pytest.fixture(scope="function", name="fake_override") -def fake_override_fixture(fake_api_call: ApiCall) -> Override: - """Return a Collection object with test values.""" - return Override(fake_api_call, "companies", "company_override") - - -@pytest.fixture(scope="function", name="fake_synonyms") -def fake_synonyms_fixture(fake_api_call: ApiCall) -> Synonyms: - """Return a Collection object with test values.""" - return Synonyms(fake_api_call, "companies") - - -@pytest.fixture(scope="function", name="fake_synonym") -def fake_synonym_fixture(fake_api_call: ApiCall) -> Synonym: - """Return a Collection object with test values.""" - return Synonym(fake_api_call, "companies", "company_synonym") - - -@pytest.fixture(scope="function", name="fake_aliases") -def fake_aliases_fixture(fake_api_call: ApiCall) -> Aliases: - """Return a Collection object with test values.""" - return Aliases(fake_api_call) - - -@pytest.fixture(scope="function", name="fake_alias") -def fake_alias_fixture(fake_api_call: ApiCall) -> Alias: - """Return a Collection object with test values.""" - return Alias(fake_api_call, "company_alias") - - -@pytest.fixture(scope="function", name="fake_stopwords") -def fake_stopwords_fixture(fake_api_call: ApiCall) -> Stopwords: - """Return a Stopwords object with test values.""" - return Stopwords(fake_api_call) - - -@pytest.fixture(scope="function", 
name="fake_stopwords_set") -def fake_stopwords_set_fixture(fake_api_call: ApiCall) -> StopwordsSet: - """Return a Collection object with test values.""" - return StopwordsSet(fake_api_call, "company_stopwords") - - -@pytest.fixture(scope="function", name="actual_analytics_rules") -def actual_analytics_rules_fixture(actual_api_call: ApiCall) -> AnalyticsRules: - """Return a AnalyticsRules object using a real API.""" - return AnalyticsRules(actual_api_call) - - -@pytest.fixture(scope="function", name="actual_keys") -def actual_keys_fixture(actual_api_call: ApiCall) -> Keys: - """Return a Keys object using a real API.""" - return Keys(actual_api_call) - - -@pytest.fixture(scope="function", name="fake_analytics_rule") -def fake_analytics_rule_fixture(fake_api_call: ApiCall) -> AnalyticsRule: - """Return a Collection object with test values.""" - return AnalyticsRule(fake_api_call, "company_analytics_rule") - - -@pytest.fixture(scope="function", name="fake_operations") -def fake_operations_fixture(fake_api_call: ApiCall) -> Operations: - """Return a Collection object with test values.""" - return Operations(fake_api_call) - - -@pytest.fixture(scope="function", name="fake_keys") -def fake_keys_fixture(fake_api_call: ApiCall) -> Keys: - """Return a AnalyticsRule object with test values.""" - return Keys(fake_api_call) - - -@pytest.fixture(scope="function", name="fake_key") -def fake_key_fixture(fake_api_call: ApiCall) -> Key: - """Return a Key object with test values.""" - return Key(fake_api_call, 1) - - -@pytest.fixture(scope="function", name="fake_documents") -def fake_documents_fixture(fake_api_call: ApiCall) -> Documents: - """Return a Documents object with test values.""" - return Documents(fake_api_call, "companies") - - -@pytest.fixture(scope="function", name="fake_document") -def fake_document_fixture(fake_api_call: ApiCall) -> Document: - """Return a Document object with test values.""" - return Document(fake_api_call, "companies", "0") - - 
-@pytest.fixture(scope="function", name="fake_debug") -def fake_debug_fixture(fake_api_call: ApiCall) -> Debug: - """Return a debug object with test values.""" - return Debug(fake_api_call) - - -class Company(typing.TypedDict): - """Company data type.""" - - id: str - company_name: str - num_employees: int - - -@pytest.fixture(scope="function", name="generate_companies") -def generate_companies_fixture() -> typing.List[Company]: - """Generate a list of companies using fake data.""" - companies: typing.List[Company] = [] - for _ in range(50): - companies.append( - { - "id": str(_), - "company_name": fake.company(), - "num_employees": fake.random_int(1, 1000), - }, - ) - - return companies +pytest_plugins = [ + fixture_file.replace("/", ".").replace(".py", "") + for fixture_file in glob("**/tests/fixtures/[!__]*.py", recursive=True) +] diff --git a/tests/document_test.py b/tests/document_test.py index 7c4d210..0392f1b 100644 --- a/tests/document_test.py +++ b/tests/document_test.py @@ -4,7 +4,7 @@ import requests_mock -from tests.conftest import Company +from tests.fixtures.document_fixtures import Companies from tests.utils.object_assertions import ( assert_match_object, assert_object_lists_match, @@ -34,7 +34,7 @@ def test_init(fake_api_call: ApiCall) -> None: def test_retrieve(fake_document: Document) -> None: """Test that the Document object can retrieve an document.""" - json_response: Company = { + json_response: Companies = { "company_name": "Company", "id": "0", "num_employees": 10, @@ -59,7 +59,7 @@ def test_retrieve(fake_document: Document) -> None: def test_delete(fake_document: Document) -> None: """Test that the Document object can delete an document.""" - json_response: Company = { + json_response: Companies = { "company_name": "Company", "id": "0", "num_employees": 10, diff --git a/tests/documents_test.py b/tests/documents_test.py index 29b66d7..3796f82 100644 --- a/tests/documents_test.py +++ b/tests/documents_test.py @@ -12,7 +12,7 @@ import pytest 
from pytest_mock import MockFixture -from tests.conftest import Company +from tests.fixtures.document_fixtures import Companies from tests.utils.object_assertions import ( assert_match_object, assert_object_lists_match, @@ -63,14 +63,14 @@ def test_get_existing_document(fake_documents: Documents) -> None: def test_create( - actual_documents: Documents[Company], + actual_documents: Documents[Companies], actual_api_call: ApiCall, delete_all: None, create_collection: None, mocker: MockFixture, ) -> None: """Test that the Documents object can create a document on Typesense server.""" - company: Company = { + company: Companies = { "company_name": "Typesense", "id": "1", "num_employees": 25, @@ -89,14 +89,14 @@ def test_create( def test_upsert( - actual_documents: Documents[Company], + actual_documents: Documents[Companies], actual_api_call: ApiCall, delete_all: None, create_collection: None, mocker: MockFixture, ) -> None: """Test that the Documents object can upsert a document on Typesense server.""" - company: Company = { + company: Companies = { "company_name": "company", "id": "0", "num_employees": 10, @@ -113,7 +113,7 @@ def test_upsert( entity_type=typing.Dict[str, str], ) - updated_company: Company = { + updated_company: Companies = { "company_name": "company_updated", "id": "0", "num_employees": 10, @@ -136,7 +136,7 @@ def test_upsert( def test_update( - actual_documents: Documents[Company], + actual_documents: Documents[Companies], actual_api_call: ApiCall, delete_all: None, create_collection: None, @@ -152,7 +152,7 @@ def test_update( def test_create_many( - actual_documents: Documents[Company], + actual_documents: Documents[Companies], actual_api_call: ApiCall, delete_all: None, create_collection: None, @@ -160,7 +160,7 @@ def test_create_many( caplog: pytest.LogCaptureFixture, ) -> None: """Test that the Documents object can create many documents on Typesense server.""" - companies: typing.List[Company] = [ + companies: typing.List[Companies] = [ { 
"company_name": "Typesense", "id": "1", @@ -180,7 +180,7 @@ def test_create_many( def test_export( - actual_documents: Documents[Company], + actual_documents: Documents[Companies], delete_all: None, create_collection: None, create_document: None, @@ -191,7 +191,7 @@ def test_export( def test_delete( - actual_documents: Documents[Company], + actual_documents: Documents[Companies], delete_all: None, create_collection: None, create_document: None, @@ -202,7 +202,7 @@ def test_delete( def test_delete_ignore_missing( - actual_documents: Documents[Company], + actual_documents: Documents[Companies], delete_all: None, create_collection: None, ) -> None: @@ -214,14 +214,14 @@ def test_delete_ignore_missing( def test_import_fail( - generate_companies: typing.List[Company], - actual_documents: Documents[Company], + generate_companies: typing.List[Companies], + actual_documents: Documents[Companies], delete_all: None, create_collection: None, mocker: MockFixture, ) -> None: """Test that the Documents object doesn't throw an error when importing documents.""" - wrong_company: Company = {"company_name": "Wrong", "id": "0", "num_employees": 0} + wrong_company: Companies = {"company_name": "Wrong", "id": "0", "num_employees": 0} companies = generate_companies + [wrong_company] request_spy = mocker.spy(actual_documents, "import_") response = actual_documents.import_(companies) @@ -242,7 +242,7 @@ def test_import_fail( def test_import_empty( - actual_documents: Documents[Company], + actual_documents: Documents[Companies], actual_api_call: ApiCall, delete_all: None, create_collection: None, @@ -253,8 +253,8 @@ def test_import_empty( def test_import_json_fail( - actual_documents: Documents[Company], - generate_companies: typing.List[Company], + actual_documents: Documents[Companies], + generate_companies: typing.List[Companies], actual_api_call: ApiCall, delete_all: None, create_collection: None, @@ -271,8 +271,8 @@ def test_import_json_fail( def test_import_batch_size( - 
generate_companies: typing.List[Company], - actual_documents: Documents[Company], + generate_companies: typing.List[Companies], + actual_documents: Documents[Companies], actual_api_call: ApiCall, delete_all: None, create_collection: None, @@ -291,8 +291,8 @@ def test_import_batch_size( def test_import_return_docs( - generate_companies: typing.List[Company], - actual_documents: Documents[Company], + generate_companies: typing.List[Companies], + actual_documents: Documents[Companies], delete_all: None, create_collection: None, ) -> None: @@ -305,8 +305,8 @@ def test_import_return_docs( def test_import_return_ids( - generate_companies: typing.List[Company], - actual_documents: Documents[Company], + generate_companies: typing.List[Companies], + actual_documents: Documents[Companies], delete_all: None, create_collection: None, ) -> None: @@ -319,8 +319,8 @@ def test_import_return_ids( def test_import_return_ids_and_docs( - generate_companies: typing.List[Company], - actual_documents: Documents[Company], + generate_companies: typing.List[Companies], + actual_documents: Documents[Companies], delete_all: None, create_collection: None, ) -> None: @@ -337,8 +337,8 @@ def test_import_return_ids_and_docs( def test_import_jsonl( - generate_companies: typing.List[Company], - actual_documents: Documents[Company], + generate_companies: typing.List[Companies], + actual_documents: Documents[Companies], delete_all: None, create_collection: None, caplog: pytest.LogCaptureFixture, @@ -372,7 +372,7 @@ def test_import_jsonl( def test_search( - actual_documents: Documents[Company], + actual_documents: Documents[Companies], actual_api_call: ApiCall, delete_all: None, create_collection: None, @@ -407,7 +407,7 @@ def test_search( def test_search_array( - actual_documents: Documents[Company], + actual_documents: Documents[Companies], actual_api_call: ApiCall, delete_all: None, create_collection: None, @@ -442,7 +442,7 @@ def test_search_array( def test_search_invalid_parameters( - 
actual_documents: Documents[Company], + actual_documents: Documents[Companies], actual_api_call: ApiCall, delete_all: None, create_collection: None, @@ -455,7 +455,7 @@ def test_search_invalid_parameters( "q": "com", "query_by": "company_name", "invalid": [ - Company(company_name="", id="", num_employees=0), + Companies(company_name="", id="", num_employees=0), ], }, ) @@ -465,6 +465,6 @@ def test_search_invalid_parameters( { "q": "com", "query_by": "company_name", - "invalid": Company(company_name="", id="", num_employees=0), + "invalid": Companies(company_name="", id="", num_employees=0), }, ) diff --git a/tests/fixtures/alias_fixtures.py b/tests/fixtures/alias_fixtures.py new file mode 100644 index 0000000..b226301 --- /dev/null +++ b/tests/fixtures/alias_fixtures.py @@ -0,0 +1,64 @@ +"""Fixtures for alias tests.""" + +import pytest +import requests + +from typesense.alias import Alias +from typesense.aliases import Aliases +from typesense.api_call import ApiCall + + +@pytest.fixture(scope="function", name="delete_all_aliases") +def clear_typesense_aliases() -> None: + """Remove all aliases from the Typesense server.""" + url = "http://localhost:8108/aliases" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + # Get the list of collections + response = requests.get(url, headers=headers, timeout=3) + response.raise_for_status() + + aliases = response.json() + + # Delete each alias + for alias in aliases["aliases"]: + alias_name = alias.get("name") + delete_url = f"{url}/{alias_name}" + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + delete_response.raise_for_status() + + +@pytest.fixture(scope="function", name="create_alias") +def create_alias_fixture(create_collection: None) -> None: + """Create an alias in the Typesense server.""" + url = "http://localhost:8108/aliases/company_alias" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + alias_data = { + 
"collection_name": "companies", + } + + alias_creation_response = requests.put( + url, + headers=headers, + json=alias_data, + timeout=3, + ) + alias_creation_response.raise_for_status() + + +@pytest.fixture(scope="function", name="actual_aliases") +def actual_aliases_fixture(actual_api_call: ApiCall) -> Aliases: + """Return a Aliases object using a real API.""" + return Aliases(actual_api_call) + + +@pytest.fixture(scope="function", name="fake_aliases") +def fake_aliases_fixture(fake_api_call: ApiCall) -> Aliases: + """Return a Aliases object with test values.""" + return Aliases(fake_api_call) + + +@pytest.fixture(scope="function", name="fake_alias") +def fake_alias_fixture(fake_api_call: ApiCall) -> Alias: + """Return a Alias object with test values.""" + return Alias(fake_api_call, "company_alias") diff --git a/tests/fixtures/analytics_rule_fixtures.py b/tests/fixtures/analytics_rule_fixtures.py new file mode 100644 index 0000000..2f92008 --- /dev/null +++ b/tests/fixtures/analytics_rule_fixtures.py @@ -0,0 +1,96 @@ +"""Fixtures for the Analytics Rules tests.""" + +import pytest +import requests + +from typesense.analytics_rule import AnalyticsRule +from typesense.analytics_rules import AnalyticsRules +from typesense.api_call import ApiCall + + +@pytest.fixture(scope="function", name="delete_all_analytics_rules") +def clear_typesense_analytics_rules() -> None: + """Remove all analytics_rules from the Typesense server.""" + url = "http://localhost:8108/analytics/rules" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + # Get the list of rules + response = requests.get(url, headers=headers, timeout=3) + response.raise_for_status() + analytics_rules = response.json() + + # Delete each analytics_rule + for analytics_rule_set in analytics_rules["rules"]: + analytics_rule_id = analytics_rule_set.get("name") + delete_url = f"{url}/{analytics_rule_id}" + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + 
delete_response.raise_for_status() + + +@pytest.fixture(scope="function", name="create_analytics_rule") +def create_analytics_rule_fixture( + create_collection: None, + create_query_collection: None, +) -> None: + """Create a collection in the Typesense server.""" + url = "http://localhost:8108/analytics/rules" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + analytics_rule_data = { + "name": "company_analytics_rule", + "type": "nohits_queries", + "params": { + "source": { + "collections": ["companies"], + }, + "destination": {"collection": "companies_queries"}, + }, + } + + response = requests.post(url, headers=headers, json=analytics_rule_data, timeout=3) + response.raise_for_status() + + +@pytest.fixture(scope="function", name="fake_analytics_rules") +def fake_analytics_rules_fixture(fake_api_call: ApiCall) -> AnalyticsRules: + """Return a AnalyticsRule object with test values.""" + return AnalyticsRules(fake_api_call) + + +@pytest.fixture(scope="function", name="actual_analytics_rules") +def actual_analytics_rules_fixture(actual_api_call: ApiCall) -> AnalyticsRules: + """Return a AnalyticsRules object using a real API.""" + return AnalyticsRules(actual_api_call) + + +@pytest.fixture(scope="function", name="fake_analytics_rule") +def fake_analytics_rule_fixture(fake_api_call: ApiCall) -> AnalyticsRule: + """Return a AnalyticsRule object with test values.""" + return AnalyticsRule(fake_api_call, "company_analytics_rule") + + +@pytest.fixture(scope="function", name="create_query_collection") +def create_query_collection_fixture() -> None: + """Create a query collection for analytics rules in the Typesense server.""" + url = "http://localhost:8108/collections" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + query_collection_data = { + "name": "companies_queries", + "fields": [ + { + "name": "q", + "type": "string", + }, + { + "name": "count", + "type": "int32", + }, + ], + } + + response 
= requests.post( + url, + headers=headers, + json=query_collection_data, + timeout=3, + ) + response.raise_for_status() diff --git a/tests/fixtures/api_call_fixtures.py b/tests/fixtures/api_call_fixtures.py new file mode 100644 index 0000000..dde0c3b --- /dev/null +++ b/tests/fixtures/api_call_fixtures.py @@ -0,0 +1,20 @@ +"""Fixtures for ApiCall tests.""" + +import pytest + +from typesense.api_call import ApiCall +from typesense.configuration import Configuration + + +@pytest.fixture(scope="function", name="fake_api_call") +def fake_api_call_fixture( + fake_config: Configuration, +) -> ApiCall: + """Return an ApiCall object with test values.""" + return ApiCall(fake_config) + + +@pytest.fixture(scope="function", name="actual_api_call") +def actual_api_call_fixture(actual_config: Configuration) -> ApiCall: + """Return an ApiCall object using a real API.""" + return ApiCall(actual_config) diff --git a/tests/fixtures/client_fixtures.py b/tests/fixtures/client_fixtures.py new file mode 100644 index 0000000..a6693be --- /dev/null +++ b/tests/fixtures/client_fixtures.py @@ -0,0 +1,20 @@ +"""Fixtures for the client tests.""" + +import pytest + +from typesense.client import Client +from typesense.configuration import ConfigDict + + +@pytest.fixture(scope="function", name="fake_client") +def fake_client_fixture( + fake_config_dict: ConfigDict, +) -> Client: + """Return a client object with test values.""" + return Client(fake_config_dict) + + +@pytest.fixture(scope="function", name="actual_client") +def actual_client_fixture(actual_config_dict: ConfigDict) -> Client: + """Return a client object using a real API.""" + return Client(actual_config_dict) diff --git a/tests/fixtures/collections_fixtures.py b/tests/fixtures/collections_fixtures.py new file mode 100644 index 0000000..de75eea --- /dev/null +++ b/tests/fixtures/collections_fixtures.py @@ -0,0 +1,93 @@ +"""Fixtures for Collections tests.""" + +import pytest +import requests + +from typesense.api_call import ApiCall 
+from typesense.collection import Collection +from typesense.collections import Collections + + +@pytest.fixture(scope="function", name="delete_all") +def clear_typesense_collections() -> None: + """Remove all collections from the Typesense server.""" + url = "http://localhost:8108/collections" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + # Get the list of collections + response = requests.get(url, headers=headers, timeout=3) + response.raise_for_status() + collections = response.json() + + # Delete each collection + for collection in collections: + collection_name = collection["name"] + delete_url = f"{url}/{collection_name}" + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + delete_response.raise_for_status() + + +@pytest.fixture(scope="function", name="create_collection") +def create_collection_fixture() -> None: + """Create a collection in the Typesense server.""" + url = "http://localhost:8108/collections" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + collection_data = { + "name": "companies", + "fields": [ + { + "name": "company_name", + "type": "string", + }, + { + "name": "num_employees", + "type": "int32", + }, + ], + "default_sorting_field": "num_employees", + } + + response = requests.post(url, headers=headers, json=collection_data, timeout=3) + response.raise_for_status() + + +@pytest.fixture(scope="function", name="create_another_collection") +def create_another_collection_fixture() -> None: + """Create a collection in the Typesense server.""" + url = "http://localhost:8108/collections" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + collection_data = { + "name": "companies_2", + "fields": [ + { + "name": "company_name", + "type": "string", + }, + { + "name": "num_employees", + "type": "int32", + }, + ], + "default_sorting_field": "num_employees", + } + + response = requests.post(url, headers=headers, 
json=collection_data, timeout=3) + response.raise_for_status() + + +@pytest.fixture(scope="function", name="actual_collections") +def actual_collections_fixture(actual_api_call: ApiCall) -> Collections: + """Return a Collections object using a real API.""" + return Collections(actual_api_call) + + +@pytest.fixture(scope="function", name="fake_collections") +def fake_collections_fixture(fake_api_call: ApiCall) -> Collections: + """Return a Collections object with test values.""" + return Collections(fake_api_call) + + +@pytest.fixture(scope="function", name="fake_collection") +def fake_collection_fixture(fake_api_call: ApiCall) -> Collection: + """Return a Collection object with test values.""" + return Collection(fake_api_call, "companies") diff --git a/tests/fixtures/configuration_fixtures.py b/tests/fixtures/configuration_fixtures.py new file mode 100644 index 0000000..713f403 --- /dev/null +++ b/tests/fixtures/configuration_fixtures.py @@ -0,0 +1,71 @@ +"""Fixtures for Configuration tests.""" + +import pytest + +from typesense.configuration import ConfigDict, Configuration + + +@pytest.fixture(scope="function", name="fake_config_dict") +def fake_config_dict_fixture() -> ConfigDict: + """Return a dictionary with test values.""" + return { + "api_key": "test-api-key", + "nodes": [ + { + "host": "node0", + "port": 8108, + "protocol": "http", + }, + { + "host": "node1", + "port": 8108, + "protocol": "http", + }, + { + "host": "node2", + "port": 8108, + "protocol": "http", + }, + ], + "nearest_node": { + "host": "nearest", + "port": 8108, + "protocol": "http", + }, + "num_retries": 3, + "healthcheck_interval_seconds": 60, + "retry_interval_seconds": 0.001, + "connection_timeout_seconds": 0.001, + "verify": True, + } + + +@pytest.fixture(scope="function", name="actual_config_dict") +def actual_config_dict_fixture() -> ConfigDict: + """Return a dictionary with test values.""" + return { + "api_key": "xyz", + "nodes": [ + { + "host": "localhost", + "port": 8108, + 
"protocol": "http", + }, + ], + } + + +@pytest.fixture(scope="function", name="fake_config") +def fake_config_fixture(fake_config_dict: ConfigDict) -> Configuration: + """Return a Configuration object with test values.""" + return Configuration( + config_dict=fake_config_dict, + ) + + +@pytest.fixture(scope="function", name="actual_config") +def actual_config_fixture(actual_config_dict: ConfigDict) -> Configuration: + """Return a Configuration object using a real API.""" + return Configuration( + config_dict=actual_config_dict, + ) diff --git a/tests/fixtures/conversation_model_fixtures.py b/tests/fixtures/conversation_model_fixtures.py new file mode 100644 index 0000000..03451e7 --- /dev/null +++ b/tests/fixtures/conversation_model_fixtures.py @@ -0,0 +1,107 @@ +"""Fixtures for the conversation model tests.""" + +import os + +import pytest +import requests +from dotenv import load_dotenv + +from typesense.api_call import ApiCall +from typesense.conversation_model import ConversationModel +from typesense.conversations_models import ConversationsModels + +load_dotenv() + + +@pytest.fixture(scope="function", name="delete_all_conversations_models") +def clear_typesense_conversations_models() -> None: + """Remove all conversations_models from the Typesense server.""" + url = "http://localhost:8108/conversations/models" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + # Get the list of collections + response = requests.get(url, headers=headers, timeout=3) + response.raise_for_status() + + conversations_models = response.json() + + # Delete each conversation model + for conversation_model in conversations_models: + conversation_model_id = conversation_model.get("id") + delete_url = f"{url}/{conversation_model_id}" + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + delete_response.raise_for_status() + + +@pytest.fixture(scope="function", name="create_conversations_model") +def 
create_conversations_model_fixture( + create_conversation_history_collection: None, +) -> str: + """Create a conversations model in the Typesense server.""" + url = "http://localhost:8108/conversations/models" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + conversations_model_data = { + "api_key": os.environ["OPEN_AI_KEY"], + "max_bytes": 16384, + "model_name": "openai/gpt-3.5-turbo", + "history_collection": "conversation_store", + "system_prompt": "This is a system prompt", + } + + response = requests.post( + url, + headers=headers, + json=conversations_model_data, + timeout=3, + ) + + response.raise_for_status() + + conversation_model_id: str = response.json()["id"] + return conversation_model_id + + +@pytest.fixture(scope="function", name="fake_conversations_models") +def fake_conversations_models_fixture(fake_api_call: ApiCall) -> ConversationsModels: + """Return a Collection object with test values.""" + return ConversationsModels(fake_api_call) + + +@pytest.fixture(scope="function", name="fake_conversation_model") +def fake_conversation_model_fixture(fake_api_call: ApiCall) -> ConversationModel: + """Return a ConversationModel object with test values.""" + return ConversationModel(fake_api_call, "conversation_model_id") + + +@pytest.fixture(scope="function", name="actual_conversations_models") +def actual_conversations_models_fixture( + actual_api_call: ApiCall, +) -> ConversationsModels: + """Return a ConversationsModels object using a real API.""" + return ConversationsModels(actual_api_call) + + +@pytest.fixture(scope="function", name="create_conversation_history_collection") +def create_conversation_history_collection_fixture() -> None: + """Create a collection for conversation history in the Typesense server.""" + url = "http://localhost:8108/collections" + delete_url = "http://localhost:8108/collections/conversation_store" + + 
headers = {"X-TYPESENSE-API-KEY": "xyz"} + collection_data = { + "name": "conversation_store", + "fields": [ + {"name": "conversation_id", "type": "string"}, + {"name": "model_id", "type": "string"}, + {"name": "timestamp", "type": "int32"}, + {"name": "role", "type": "string", "index": False}, + {"name": "message", "type": "string", "index": False}, + ], + } + + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + if delete_response.status_code not in {200, 404}: + delete_response.raise_for_status() + + response = requests.post(url, headers=headers, json=collection_data, timeout=3) + response.raise_for_status() diff --git a/tests/fixtures/debug_fixtures.py b/tests/fixtures/debug_fixtures.py new file mode 100644 index 0000000..13c29f6 --- /dev/null +++ b/tests/fixtures/debug_fixtures.py @@ -0,0 +1,18 @@ +"""Fixtures for the Debug class tests.""" + +import pytest + +from typesense.api_call import ApiCall +from typesense.debug import Debug + + +@pytest.fixture(scope="function", name="actual_debug") +def actual_debug_fixture(actual_api_call: ApiCall) -> Debug: + """Return a Debug object using a real API.""" + return Debug(actual_api_call) + + +@pytest.fixture(scope="function", name="fake_debug") +def fake_debug_fixture(fake_api_call: ApiCall) -> Debug: + """Return a debug object with test values.""" + return Debug(fake_api_call) diff --git a/tests/fixtures/document_fixtures.py b/tests/fixtures/document_fixtures.py new file mode 100644 index 0000000..8e829c3 --- /dev/null +++ b/tests/fixtures/document_fixtures.py @@ -0,0 +1,77 @@ +"""Fixtures for creating documents in the Typesense server.""" + +import sys + +import pytest +import requests +from faker import Faker +from faker.providers import company + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from typesense.api_call import ApiCall +from typesense.document import Document +from typesense.documents import Documents + +fake = Faker() 
+fake.add_provider(company) + + +@pytest.fixture(scope="function", name="create_document") +def create_document_fixture() -> None: + """Create a document in the Typesense server.""" + url = "http://localhost:8108/collections/companies/documents" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + document_data = { + "id": "0", + "company_name": "Company", + "num_employees": 10, + } + + response = requests.post(url, headers=headers, json=document_data, timeout=3) + response.raise_for_status() + + +@pytest.fixture(scope="function", name="actual_documents") +def actual_documents_fixture(actual_api_call: ApiCall) -> Documents: + """Return a Documents object using a real API.""" + return Documents(actual_api_call, "companies") + + +@pytest.fixture(scope="function", name="fake_documents") +def fake_documents_fixture(fake_api_call: ApiCall) -> Documents: + """Return a Documents object with test values.""" + return Documents(fake_api_call, "companies") + + +@pytest.fixture(scope="function", name="fake_document") +def fake_document_fixture(fake_api_call: ApiCall) -> Document: + """Return a Document object with test values.""" + return Document(fake_api_call, "companies", "0") + + +class Companies(typing.TypedDict): + """Company data type.""" + + id: str + company_name: str + num_employees: int + + +@pytest.fixture(scope="function", name="generate_companies") +def generate_companies_fixture() -> typing.List[Companies]: + """Generate a list of companies using fake data.""" + companies: typing.List[Companies] = [] + for company_index in range(50): + companies.append( + { + "id": str(company_index), + "company_name": fake.company(), + "num_employees": fake.random_int(1, 1000), + }, + ) + + return companies diff --git a/tests/fixtures/key_fixtures.py b/tests/fixtures/key_fixtures.py new file mode 100644 index 0000000..57833a8 --- /dev/null +++ b/tests/fixtures/key_fixtures.py @@ -0,0 +1,63 @@ +"""Fixtures for the key tests.""" + +import pytest 
+import requests + +from typesense.api_call import ApiCall +from typesense.key import Key +from typesense.keys import Keys + + +@pytest.fixture(scope="function", name="delete_all_keys") +def clear_typesense_keys() -> None: + """Remove all keys from the Typesense server.""" + url = "http://localhost:8108/keys" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + # Get the list of keys + response = requests.get(url, headers=headers, timeout=3) + response.raise_for_status() + + keys = response.json() + + # Delete each key + for key in keys["keys"]: + key_name = key.get("id") + delete_url = f"{url}/{key_name}" + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + delete_response.raise_for_status() + + +@pytest.fixture(scope="function", name="create_key_id") +def create_key_fixture() -> int: + """Create a key set in the Typesense server.""" + url = "http://localhost:8108/keys" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + api_key_data = { + "actions": ["documents:search"], + "collections": ["companies"], + "description": "Search-only key", + } + + response = requests.post(url, headers=headers, json=api_key_data, timeout=3) + response.raise_for_status() + key_id: int = response.json()["id"] + return key_id + + +@pytest.fixture(scope="function", name="actual_keys") +def actual_keys_fixture(actual_api_call: ApiCall) -> Keys: + """Return a Keys object using a real API.""" + return Keys(actual_api_call) + + +@pytest.fixture(scope="function", name="fake_keys") +def fake_keys_fixture(fake_api_call: ApiCall) -> Keys: + """Return a AnalyticsRule object with test values.""" + return Keys(fake_api_call) + + +@pytest.fixture(scope="function", name="fake_key") +def fake_key_fixture(fake_api_call: ApiCall) -> Key: + """Return a Key object with test values.""" + return Key(fake_api_call, 1) diff --git a/tests/fixtures/multi_search_fixtures.py b/tests/fixtures/multi_search_fixtures.py 
new file mode 100644 index 0000000..171ebac --- /dev/null +++ b/tests/fixtures/multi_search_fixtures.py @@ -0,0 +1,12 @@ +"""Fixtures for the MultiSearch class.""" + +import pytest + +from typesense.api_call import ApiCall +from typesense.multi_search import MultiSearch + + +@pytest.fixture(scope="function", name="actual_multi_search") +def actual_multi_search_fixture(actual_api_call: ApiCall) -> MultiSearch: + """Return a MultiSearch object using a real API.""" + return MultiSearch(actual_api_call) diff --git a/tests/fixtures/operation_fixtures.py b/tests/fixtures/operation_fixtures.py new file mode 100644 index 0000000..6391ad8 --- /dev/null +++ b/tests/fixtures/operation_fixtures.py @@ -0,0 +1,18 @@ +"""Fixtures for the Operations tests.""" + +import pytest + +from typesense.api_call import ApiCall +from typesense.operations import Operations + + +@pytest.fixture(scope="function", name="actual_operations") +def actual_operations_fixture(actual_api_call: ApiCall) -> Operations: + """Return a Operations object using a real API.""" + return Operations(actual_api_call) + + +@pytest.fixture(scope="function", name="fake_operations") +def fake_operations_fixture(fake_api_call: ApiCall) -> Operations: + """Return a Collection object with test values.""" + return Operations(fake_api_call) diff --git a/tests/fixtures/override_fixtures.py b/tests/fixtures/override_fixtures.py new file mode 100644 index 0000000..d584bbe --- /dev/null +++ b/tests/fixtures/override_fixtures.py @@ -0,0 +1,40 @@ +"""Fixtures for the Overrides tests.""" + +import pytest +import requests + +from typesense.api_call import ApiCall +from typesense.override import Override +from typesense.overrides import Overrides + + +@pytest.fixture(scope="function", name="create_override") +def create_override_fixture(create_collection: None) -> None: + """Create an override in the Typesense server.""" + url = 
"http://localhost:8108/collections/companies/overrides/company_override" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + override_data = { + "rule": {"match": "exact", "query": "companies"}, + "filter_by": "num_employees>10", + } + + response = requests.put(url, headers=headers, json=override_data, timeout=3) + response.raise_for_status() + + +@pytest.fixture(scope="function", name="actual_overrides") +def actual_overrides_fixture(actual_api_call: ApiCall) -> Overrides: + """Return a Overrides object using a real API.""" + return Overrides(actual_api_call, "companies") + + +@pytest.fixture(scope="function", name="fake_overrides") +def fake_overrides_fixture(fake_api_call: ApiCall) -> Overrides: + """Return a Override object with test values.""" + return Overrides(fake_api_call, "companies") + + +@pytest.fixture(scope="function", name="fake_override") +def fake_override_fixture(fake_api_call: ApiCall) -> Override: + """Return a Override object with test values.""" + return Override(fake_api_call, "companies", "company_override") diff --git a/tests/fixtures/stopword_fixtures.py b/tests/fixtures/stopword_fixtures.py new file mode 100644 index 0000000..eb4bb2d --- /dev/null +++ b/tests/fixtures/stopword_fixtures.py @@ -0,0 +1,69 @@ +"""Fixtures for the stopword tests.""" + +import pytest +import requests + +from typesense.api_call import ApiCall +from typesense.stopwords import Stopwords +from typesense.stopwords_set import StopwordsSet + + +@pytest.fixture(scope="function", name="create_stopword") +def create_stopword_fixture() -> None: + """Create a stopword set in the Typesense server.""" + url = "http://localhost:8108/stopwords/company_stopwords" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + stopword_data = { + "stopwords": ["and", "is", "the"], + } + + create_stopword_response = requests.put( + url, + headers=headers, + json=stopword_data, + timeout=3, + ) + 
create_stopword_response.raise_for_status() + + +@pytest.fixture(scope="function", name="delete_all_stopwords") +def clear_typesense_stopwords() -> None: + """Remove all stopwords from the Typesense server.""" + url = "http://localhost:8108/stopwords" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + # Get the list of stopwords + response = requests.get(url, headers=headers, timeout=3) + response.raise_for_status() + stopwords = response.json() + + # Delete each stopword + for stopword_set in stopwords["stopwords"]: + stopword_id = stopword_set.get("id") + delete_url = f"{url}/{stopword_id}" + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + delete_response.raise_for_status() + + +@pytest.fixture(scope="function", name="actual_stopwords") +def actual_stopwords_fixture(actual_api_call: ApiCall) -> Stopwords: + """Return a Stopwords object using a real API.""" + return Stopwords(actual_api_call) + + +@pytest.fixture(scope="function", name="actual_stopwords_set") +def actual_stopwords_set_fixture(actual_api_call: ApiCall) -> StopwordsSet: + """Return a Stopwords object using a real API.""" + return StopwordsSet(actual_api_call, "company_stopwords") + + +@pytest.fixture(scope="function", name="fake_stopwords") +def fake_stopwords_fixture(fake_api_call: ApiCall) -> Stopwords: + """Return a Stopwords object with test values.""" + return Stopwords(fake_api_call) + + +@pytest.fixture(scope="function", name="fake_stopwords_set") +def fake_stopwords_set_fixture(fake_api_call: ApiCall) -> StopwordsSet: + """Return a Collection object with test values.""" + return StopwordsSet(fake_api_call, "company_stopwords") diff --git a/tests/fixtures/synonym_fixtures.py b/tests/fixtures/synonym_fixtures.py new file mode 100644 index 0000000..8387cfa --- /dev/null +++ b/tests/fixtures/synonym_fixtures.py @@ -0,0 +1,44 @@ +"""Fixtures for the synonym tests.""" + +import pytest +import requests + +from typesense.api_call 
import ApiCall +from typesense.synonym import Synonym +from typesense.synonyms import Synonyms + + +@pytest.fixture(scope="function", name="create_synonym") +def create_synonym_fixture(create_collection: None) -> None: + """Create a synonym in the Typesense server.""" + url = "http://localhost:8108/collections/companies/synonyms/company_synonym" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + synonym_data = { + "synonyms": ["companies", "corporations", "firms"], + } + + create_synonym_response = requests.put( + url, + headers=headers, + json=synonym_data, + timeout=3, + ) + create_synonym_response.raise_for_status() + + +@pytest.fixture(scope="function", name="fake_synonyms") +def fake_synonyms_fixture(fake_api_call: ApiCall) -> Synonyms: + """Return a Synonyms object with test values.""" + return Synonyms(fake_api_call, "companies") + + +@pytest.fixture(scope="function", name="actual_synonyms") +def actual_synonyms_fixture(actual_api_call: ApiCall) -> Synonyms: + """Return a Synonyms object using a real API.""" + return Synonyms(actual_api_call, "companies") + + +@pytest.fixture(scope="function", name="fake_synonym") +def fake_synonym_fixture(fake_api_call: ApiCall) -> Synonym: + """Return a Synonym object with test values.""" + return Synonym(fake_api_call, "companies", "company_synonym") diff --git a/tests/multi_search_test.py b/tests/multi_search_test.py index 4e6d0f9..cdd751a 100644 --- a/tests/multi_search_test.py +++ b/tests/multi_search_test.py @@ -2,7 +2,7 @@ import pytest -from tests.conftest import Company +from tests.fixtures.document_fixtures import Companies from tests.utils.object_assertions import ( assert_match_object, assert_object_lists_match, @@ -154,7 +154,7 @@ def test_search_invalid_parameters( { "q": "com", "query_by": "company_name", - "invalid": [Company(company_name="", id="", num_employees=0)], + "invalid": [Companies(company_name="", id="", num_employees=0)], }, ], }, @@ -167,7 +167,7 @@ def 
test_search_invalid_parameters( { "q": "com", "query_by": "company_name", - "invalid": Company(company_name="", id="", num_employees=0), + "invalid": Companies(company_name="", id="", num_employees=0), }, ], }, From af2dec92882d8ce2e11286d8d821f6e63cb1bf3f Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 9 Sep 2024 15:29:56 +0300 Subject: [PATCH 163/288] feat(client): add type hints to client class - Add type annotations to Client class and its methods - Introduce new get_collection method for typed Collection retrieval - Refactor and organize imports - Remove duplicate imports - Improve type safety with conditional typing imports for Python 3.11+ --- src/typesense/client.py | 39 ++++++++++++++++++++++++++++++++------- 1 file changed, 32 insertions(+), 7 deletions(-) diff --git a/src/typesense/client.py b/src/typesense/client.py index d7d7a21..c4045da 100644 --- a/src/typesense/client.py +++ b/src/typesense/client.py @@ -1,20 +1,34 @@ +import sys + +from typesense.types.document import DocumentSchema + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from typesense.collection import Collection + from .aliases import Aliases +from .analytics import Analytics +from .api_call import ApiCall +from .collections import Collections +from .configuration import ConfigDict, Configuration from .conversations_models import ConversationsModels from .debug import Debug -from .collections import Collections -from .multi_search import MultiSearch from .keys import Keys +from .multi_search import MultiSearch from .operations import Operations -from .configuration import Configuration -from .api_call import ApiCall -from .analytics import Analytics from .stopwords import Stopwords +TDoc = typing.TypeVar("TDoc", bound=DocumentSchema) + + class Client(object): - def __init__(self, config_dict): + def __init__(self, config_dict: ConfigDict) -> None: self.config = Configuration(config_dict) self.api_call = ApiCall(self.config) - 
self.collections = Collections(self.api_call) + self.collections: Collections[DocumentSchema] = Collections(self.api_call) self.multi_search = MultiSearch(self.api_call) self.keys = Keys(self.api_call) self.aliases = Aliases(self.api_call) @@ -23,3 +37,14 @@ def __init__(self, config_dict): self.debug = Debug(self.api_call) self.stopwords = Stopwords(self.api_call) self.conversations_models = ConversationsModels(self.api_call) + + def typed_collection( + self, + *, + model: typing.Type[TDoc], + name: typing.Union[str, None] = None, + ) -> Collection[TDoc]: + if name is None: + name = model.__name__.lower() + collection: Collection[TDoc] = self.collections[name] + return collection From f64f2970024bc1e2782ee231cfdf6e98d8a74d22 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 9 Sep 2024 15:30:51 +0300 Subject: [PATCH 164/288] test(client): add tests for client class - Implement tests for Client initialization - Add tests for get_collection method with and without name - Create tests for actual collection retrieval - Ensure proper type hinting and imports --- tests/client_test.py | 70 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 70 insertions(+) create mode 100644 tests/client_test.py diff --git a/tests/client_test.py b/tests/client_test.py new file mode 100644 index 0000000..d7d032d --- /dev/null +++ b/tests/client_test.py @@ -0,0 +1,70 @@ +"""Tests for the Client class.""" + +from tests.fixtures.document_fixtures import Companies +from tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from typesense.client import Client +from typesense.configuration import ConfigDict + + +def test_client_init(fake_config_dict: ConfigDict) -> None: + """Test the Client class __init__ method.""" + fake_client = Client(fake_config_dict) + assert fake_client.config == fake_client.api_call.config + + assert_match_object(fake_client.api_call.config, fake_client.config) + assert_object_lists_match(fake_client.api_call.nodes, 
fake_client.config.nodes) + assert_match_object( + fake_client.api_call.config.nearest_node, + fake_client.config.nearest_node, + ) + + assert fake_client.collections + assert fake_client.collections.collections is not None + assert fake_client.multi_search + assert fake_client.keys + assert fake_client.keys.keys is not None + assert fake_client.aliases + assert fake_client.aliases.aliases is not None + assert fake_client.analytics + assert fake_client.analytics.rules + assert fake_client.analytics.rules.rules is not None + assert fake_client.operations + assert fake_client.debug + + +def test_get_collection(fake_client: Client) -> None: + """Test the Client class get_collection method.""" + collection = fake_client.typed_collection(model=Companies, name="companies") + + assert collection + assert collection.name == "companies" + assert collection.documents.documents is not None + + +def test_get_collection_no_name(fake_client: Client) -> None: + """Test the Client class get_collection method.""" + collection = fake_client.typed_collection(model=Companies) + + assert collection + assert collection.name == "companies" + assert collection.documents.documents is not None + + +def test_retrieve_collection_actual( + actual_client: Client, + delete_all: None, + create_collection: None, +) -> None: + """Test that the client can retrieve an actual collection.""" + collection = actual_client.typed_collection(model=Companies, name="companies") + + assert collection is not None + + +def test_retrieve_collection_actual_no_name( + actual_client: Client, delete_all: None, create_collection: None, +) -> None: + """Test that the client can retrieve an actual collection.""" + collection = actual_client.typed_collection(model=Companies) + + assert collection is not None From 4c8a001721b4af7a8229ec6f98589dabe2233fd2 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 9 Sep 2024 15:33:13 +0300 Subject: [PATCH 165/288] chore: remove unused mypy ignore comment --- 
tests/configuration_validations_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/configuration_validations_test.py b/tests/configuration_validations_test.py index d5be683..99c6202 100644 --- a/tests/configuration_validations_test.py +++ b/tests/configuration_validations_test.py @@ -27,7 +27,7 @@ def test_validate_node_fields_with_valid_dict() -> None: def test_validate_node_fields_with_invalid_dict() -> None: """Test validate_node_fields with an invalid dictionary.""" assert not ConfigurationValidations.validate_node_fields( - { # type: ignore[arg-type] + { "host": "localhost", "port": 8108, }, From 686c126e17c1edb30b874ab1227ce9bf4bfc5df0 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 9 Sep 2024 15:33:26 +0300 Subject: [PATCH 166/288] chore: add pyenv python ver --- .python-version | 1 + 1 file changed, 1 insertion(+) create mode 100644 .python-version diff --git a/.python-version b/.python-version new file mode 100644 index 0000000..cc1923a --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.8 From 51fdb98ee95c00cc6165d506d63d3bd91cdcb582 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 9 Sep 2024 16:03:39 +0300 Subject: [PATCH 167/288] refactor(overrides): format override classes based on linting rules - Enhance docstrings and type hints for better clarity - Reorganize methods in Override class for logical flow - Use absolute imports for imported modules - Make resource path immutable - Remove inheritance from base object class --- src/typesense/override.py | 97 ++++++++++++++++++++++++----- src/typesense/overrides.py | 121 +++++++++++++++++++++++++++++++------ 2 files changed, 187 insertions(+), 31 deletions(-) diff --git a/src/typesense/override.py b/src/typesense/override.py index 053ef5e..651039f 100644 --- a/src/typesense/override.py +++ b/src/typesense/override.py @@ -1,34 +1,103 @@ +""" +This module provides functionality for managing individual overrides in Typesense. 
+ +Classes: + - Override: Handles operations related to a specific override within a collection. + +Methods: + - __init__: Initializes the Override object. + - _endpoint_path: Constructs the API endpoint path for this specific override. + - retrieve: Retrieves the details of this specific override. + - delete: Deletes this specific override. + +The Override class interacts with the Typesense API to manage operations on a +specific override within a collection. It provides methods to retrieve and delete +individual overrides. + +For more information regarding Overrides, refer to the Curation [documentation] +(https://typesense.org/docs/27.0/api/curation.html#curation). + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + from typesense.api_call import ApiCall from typesense.types.override import OverrideDeleteSchema, OverrideSchema class Override: + """ + Class for managing individual overrides in a Typesense collection. + + This class provides methods to interact with a specific override, + including retrieving and deleting it. + + Attributes: + api_call (ApiCall): The API call object for making requests. + collection_name (str): The name of the collection. + override_id (str): The ID of the override. + """ + def __init__( - self, api_call: ApiCall, collection_name: str, override_id: str + self, + api_call: ApiCall, + collection_name: str, + override_id: str, ) -> None: + """ + Initialize the Override object. + + Args: + api_call (ApiCall): The API call object for making requests. + collection_name (str): The name of the collection. + override_id (str): The ID of the override. 
+ """ self.api_call = api_call self.collection_name = collection_name self.override_id = override_id - def _endpoint_path(self) -> str: - from typesense.collections import Collections - from typesense.overrides import Overrides - - return "{0}/{1}/{2}/{3}".format( - Collections.RESOURCE_PATH, - self.collection_name, - Overrides.RESOURCE_PATH, - self.override_id, - ) - def retrieve(self) -> OverrideSchema: + """ + Retrieve this specific override. + + Returns: + OverrideSchema: The schema containing the override details. + """ response: OverrideSchema = self.api_call.get( - self._endpoint_path(), entity_type=OverrideSchema, as_json=True + self._endpoint_path(), + entity_type=OverrideSchema, + as_json=True, ) return response def delete(self) -> OverrideDeleteSchema: + """ + Delete this specific override. + + Returns: + OverrideDeleteSchema: The schema containing the deletion response. + """ response: OverrideDeleteSchema = self.api_call.delete( - self._endpoint_path(), entity_type=OverrideDeleteSchema + self._endpoint_path(), + entity_type=OverrideDeleteSchema, ) return response + + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific override. + + Returns: + str: The constructed endpoint path. + """ + from typesense.collections import Collections + from typesense.overrides import Overrides + + return "/".join( + [ + Collections.RESOURCE_PATH, + self.collection_name, + Overrides.resource_path, + self.override_id, + ], + ) diff --git a/src/typesense/overrides.py b/src/typesense/overrides.py index fdbfc50..89776e8 100644 --- a/src/typesense/overrides.py +++ b/src/typesense/overrides.py @@ -1,62 +1,149 @@ +""" +This module provides functionality for managing overrides in Typesense. + +Classes: + - Overrides: Handles operations related to overrides within a collection. + +Methods: + - __init__: Initializes the Overrides object. + - __getitem__: Retrieves or creates an Override object for a given override_id. 
+ - _endpoint_path: Constructs the API endpoint path for override operations. + - upsert: Creates or updates an override. + - retrieve: Retrieves all overrides for the collection. + +Attributes: + - RESOURCE_PATH: The API resource path for overrides. + +The Overrides class interacts with the Typesense API to manage override operations +within a specific collection. It provides methods to create, update, and retrieve +overrides, as well as access individual Override objects. + +For more information regarding Overrides, refer to the Curation [documentation] +(https://typesense.org/docs/27.0/api/curation.html#curation). + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + from __future__ import annotations import sys from typesense.api_call import ApiCall +from typesense.override import Override from typesense.types.override import ( OverrideCreateSchema, OverrideRetrieveSchema, OverrideSchema, ) -from .override import Override - if sys.version_info >= (3, 11): import typing else: import typing_extensions as typing -class Overrides(object): - RESOURCE_PATH = "overrides" +class Overrides: + """ + Class for managing overrides in a Typesense collection. + + This class provides methods to interact with overrides, including + retrieving, creating, and updating them. + + Attributes: + RESOURCE_PATH (str): The API resource path for overrides. + api_call (ApiCall): The API call object for making requests. + collection_name (str): The name of the collection. + overrides (Dict[str, Override]): A dictionary of Override objects. + """ + + resource_path: typing.Final[str] = "overrides" def __init__( self, api_call: ApiCall, collection_name: str, ) -> None: + """ + Initialize the Overrides object. + + Args: + api_call (ApiCall): The API call object for making requests. + collection_name (str): The name of the collection. 
+ """ self.api_call = api_call self.collection_name = collection_name self.overrides: typing.Dict[str, Override] = {} def __getitem__(self, override_id: str) -> Override: + """ + Get or create an Override object for a given override_id. + + Args: + override_id (str): The ID of the override. + + Returns: + Override: The Override object for the given ID. + """ if not self.overrides.get(override_id): self.overrides[override_id] = Override( - self.api_call, self.collection_name, override_id + self.api_call, + self.collection_name, + override_id, ) return self.overrides[override_id] - def _endpoint_path(self, override_id: typing.Union[str, None] = None) -> str: - from .collections import Collections + def upsert(self, override_id: str, schema: OverrideCreateSchema) -> OverrideSchema: + """ + Create or update an override. - override_id = override_id or "" - return "{0}/{1}/{2}/{3}".format( - Collections.RESOURCE_PATH, - self.collection_name, - Overrides.RESOURCE_PATH, - override_id, - ) + Args: + id (str): The ID of the override. + schema (OverrideCreateSchema): The schema for creating or updating the override. - def upsert(self, id: str, schema: OverrideCreateSchema) -> OverrideSchema: + Returns: + OverrideSchema: The created or updated override. + """ response: OverrideSchema = self.api_call.put( - endpoint=self._endpoint_path(id), + endpoint=self._endpoint_path(override_id), entity_type=OverrideSchema, body=schema, ) return response def retrieve(self) -> OverrideRetrieveSchema: + """ + Retrieve all overrides for the collection. + + Returns: + OverrideRetrieveSchema: The schema containing all overrides. 
+ """ response: OverrideRetrieveSchema = self.api_call.get( - self._endpoint_path(), entity_type=OverrideRetrieveSchema, as_json=True + self._endpoint_path(), + entity_type=OverrideRetrieveSchema, + as_json=True, ) return response + + def _endpoint_path(self, override_id: typing.Union[str, None] = None) -> str: + """ + Construct the API endpoint path for override operations. + + Args: + override_id (Union[str, None], optional): The ID of the override. Defaults to None. + + Returns: + str: The constructed endpoint path. + """ + from typesense.collections import Collections + + override_id = override_id or "" + + return "/".join( + [ + Collections.RESOURCE_PATH, + self.collection_name, + Overrides.resource_path, + override_id, + ], + ) From 7a72aca2cce40a01a495dc22241bff5791788d2b Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 9 Sep 2024 16:12:58 +0300 Subject: [PATCH 168/288] refactor(synonyms): format synonym classes based on linting rules --- src/typesense/synonym.py | 95 ++++++++++++++++++++++++----- src/typesense/synonyms.py | 123 ++++++++++++++++++++++++++++++++------ 2 files changed, 185 insertions(+), 33 deletions(-) diff --git a/src/typesense/synonym.py b/src/typesense/synonym.py index 6a6e1d2..9d2ef9d 100644 --- a/src/typesense/synonym.py +++ b/src/typesense/synonym.py @@ -1,30 +1,97 @@ +""" +This module provides functionality for managing individual synonyms in Typesense. + +Classes: + - Synonym: Handles operations related to a specific synonym within a collection. + +Methods: + - __init__: Initializes the Synonym object. + - _endpoint_path: Constructs the API endpoint path for this specific synonym. + - retrieve: Retrieves the details of this specific synonym. + - delete: Deletes this specific synonym. + +The Synonym class interacts with the Typesense API to manage operations on a +specific synonym within a collection. It provides methods to retrieve and delete +individual synonyms. 
+ +For more information regarding Synonyms, refer to the Synonyms [documentation] +(https://typesense.org/docs/27.0/api/synonyms.html#synonyms). + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + from typesense.api_call import ApiCall from typesense.types.synonym import SynonymDeleteSchema, SynonymSchema -class Synonym(object): +class Synonym: + """ + Class for managing individual synonyms in a Typesense collection. + + This class provides methods to interact with a specific synonym, + including retrieving and deleting it. + + Attributes: + api_call (ApiCall): The API call object for making requests. + collection_name (str): The name of the collection. + synonym_id (str): The ID of the synonym. + """ + def __init__( - self, api_call: ApiCall, collection_name: str, synonym_id: str + self, + api_call: ApiCall, + collection_name: str, + synonym_id: str, ) -> None: + """ + Initialize the Synonym object. + + Args: + api_call (ApiCall): The API call object for making requests. + collection_name (str): The name of the collection. + synonym_id (str): The ID of the synonym. + """ self.api_call = api_call self.collection_name = collection_name self.synonym_id = synonym_id - def _endpoint_path(self) -> str: - from .collections import Collections - from .synonyms import Synonyms - - return "{0}/{1}/{2}/{3}".format( - Collections.RESOURCE_PATH, - self.collection_name, - Synonyms.RESOURCE_PATH, - self.synonym_id, - ) - def retrieve(self) -> SynonymSchema: + """ + Retrieve this specific synonym. + + Returns: + SynonymSchema: The schema containing the synonym details. + """ return self.api_call.get(self._endpoint_path(), entity_type=SynonymSchema) def delete(self) -> SynonymDeleteSchema: + """ + Delete this specific synonym. + + Returns: + SynonymDeleteSchema: The schema containing the deletion response. 
+ """ return self.api_call.delete( - self._endpoint_path(), entity_type=SynonymDeleteSchema + self._endpoint_path(), + entity_type=SynonymDeleteSchema, + ) + + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific synonym. + + Returns: + str: The constructed endpoint path. + """ + from typesense.collections import Collections + from typesense.synonyms import Synonyms + + return "/".join( + [ + Collections.RESOURCE_PATH, + self.collection_name, + Synonyms.resource_path, + self.synonym_id, + ], ) diff --git a/src/typesense/synonyms.py b/src/typesense/synonyms.py index 0bc3a5a..abd6211 100644 --- a/src/typesense/synonyms.py +++ b/src/typesense/synonyms.py @@ -1,56 +1,141 @@ +""" +This module provides functionality for managing synonyms in Typesense. + +Classes: + - Synonyms: Handles operations related to synonyms within a collection. + +Methods: + - __init__: Initializes the Synonyms object. + - __getitem__: Retrieves or creates a Synonym object for a given synonym_id. + - _endpoint_path: Constructs the API endpoint path for synonym operations. + - upsert: Creates or updates a synonym. + - retrieve: Retrieves all synonyms for the collection. + +Attributes: + - RESOURCE_PATH: The API resource path for synonyms. + +The Synonyms class interacts with the Typesense API to manage synonym operations +within a specific collection. It provides methods to create, update, and retrieve +synonyms, as well as access individual Synonym objects. + +For more information regarding Synonyms, refer to the Synonyms [documentation] +(https://typesense.org/docs/27.0/api/synonyms.html#synonyms). + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. 
+""" + import sys from typesense.api_call import ApiCall +from typesense.synonym import Synonym from typesense.types.synonym import ( SynonymCreateSchema, SynonymSchema, SynonymsRetrieveSchema, ) -from .synonym import Synonym - if sys.version_info >= (3, 11): import typing else: import typing_extensions as typing -class Synonyms(object): - RESOURCE_PATH = "synonyms" +class Synonyms: + """ + Class for managing synonyms in a Typesense collection. + + This class provides methods to interact with synonyms, including + retrieving, creating, and updating them. + + Attributes: + RESOURCE_PATH (str): The API resource path for synonyms. + api_call (ApiCall): The API call object for making requests. + collection_name (str): The name of the collection. + synonyms (Dict[str, Synonym]): A dictionary of Synonym objects. + """ + + resource_path: typing.Final[str] = "synonyms" def __init__(self, api_call: ApiCall, collection_name: str): + """ + Initialize the Synonyms object. + + Args: + api_call (ApiCall): The API call object for making requests. + collection_name (str): The name of the collection. + """ self.api_call = api_call self.collection_name = collection_name self.synonyms: typing.Dict[str, Synonym] = {} def __getitem__(self, synonym_id: str) -> Synonym: + """ + Get or create a Synonym object for a given synonym_id. + + Args: + synonym_id (str): The ID of the synonym. + + Returns: + Synonym: The Synonym object for the given ID. + """ if not self.synonyms.get(synonym_id): self.synonyms[synonym_id] = Synonym( - self.api_call, self.collection_name, synonym_id + self.api_call, + self.collection_name, + synonym_id, ) - return self.synonyms[synonym_id] - def _endpoint_path(self, synonym_id: typing.Union[str, None] = None) -> str: - from typesense.collections import Collections + def upsert(self, synonym_id: str, schema: SynonymCreateSchema) -> SynonymSchema: + """ + Create or update a synonym. 
- synonym_id = synonym_id or "" - return "{0}/{1}/{2}/{3}".format( - Collections.RESOURCE_PATH, - self.collection_name, - Synonyms.RESOURCE_PATH, - synonym_id, - ) + Args: + id (str): The ID of the synonym. + schema (SynonymCreateSchema): The schema for creating or updating the synonym. - def upsert(self, id: str, schema: SynonymCreateSchema) -> SynonymSchema: + Returns: + SynonymSchema: The created or updated synonym. + """ response = self.api_call.put( - self._endpoint_path(id), body=schema, entity_type=SynonymSchema + self._endpoint_path(synonym_id), + body=schema, + entity_type=SynonymSchema, ) - return response def retrieve(self) -> SynonymsRetrieveSchema: + """ + Retrieve all synonyms for the collection. + + Returns: + SynonymsRetrieveSchema: The schema containing all synonyms. + """ response = self.api_call.get( - self._endpoint_path(), entity_type=SynonymsRetrieveSchema + self._endpoint_path(), + entity_type=SynonymsRetrieveSchema, ) return response + + def _endpoint_path(self, synonym_id: typing.Union[str, None] = None) -> str: + """ + Construct the API endpoint path for synonym operations. + + Args: + synonym_id (Union[str, None], optional): The ID of the synonym. Defaults to None. + + Returns: + str: The constructed endpoint path. 
+ """ + from typesense.collections import Collections + + synonym_id = synonym_id or "" + return "/".join( + [ + Collections.resource_path, + self.collection_name, + Synonyms.resource_path, + synonym_id, + ], + ) From a596e9d2ca50e4e744ed78f8941aadc3106a2bdc Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 9 Sep 2024 17:10:58 +0300 Subject: [PATCH 169/288] refactor(analytic): format analytic rule classes based on linting rules --- src/typesense/analytics.py | 38 +++++++++- src/typesense/analytics_rule.py | 77 +++++++++++++++++-- src/typesense/analytics_rules.py | 123 +++++++++++++++++++++++++++---- 3 files changed, 212 insertions(+), 26 deletions(-) diff --git a/src/typesense/analytics.py b/src/typesense/analytics.py index b29eee0..941cca5 100644 --- a/src/typesense/analytics.py +++ b/src/typesense/analytics.py @@ -1,8 +1,42 @@ -from typesense.api_call import ApiCall +""" +This module provides functionality for managing analytics in Typesense. + +Classes: + - Analytics: Handles operations related to analytics, including access to analytics rules. + +Methods: + - __init__: Initializes the Analytics object. -from .analytics_rules import AnalyticsRules +The Analytics class serves as an entry point for analytics-related operations in Typesense, +currently providing access to AnalyticsRules. + +For more information on analytics, refer to the Analytics & Query Suggestion +[documentation](https://typesense.org/docs/27.0/api/analytics-query-suggestions.html) + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + +from typesense.analytics_rules import AnalyticsRules +from typesense.api_call import ApiCall class Analytics(object): + """ + Class for managing analytics in Typesense. + + This class provides access to analytics-related functionalities, + currently including operations on analytics rules. 
+ + Attributes: + rules (AnalyticsRules): An instance of AnalyticsRules for managing analytics rules. + """ + def __init__(self, api_call: ApiCall) -> None: + """ + Initialize the Analytics object. + + Args: + api_call (ApiCall): The API call object for making requests. + """ self.rules = AnalyticsRules(api_call) diff --git a/src/typesense/analytics_rule.py b/src/typesense/analytics_rule.py index b259609..29e9a64 100644 --- a/src/typesense/analytics_rule.py +++ b/src/typesense/analytics_rule.py @@ -1,3 +1,25 @@ +""" +This module provides functionality for managing individual analytics rules in Typesense. + +Classes: + - AnalyticsRule: Handles operations related to a specific analytics rule. + +Methods: + - __init__: Initializes the AnalyticsRule object. + - _endpoint_path: Constructs the API endpoint path for this specific analytics rule. + - retrieve: Retrieves the details of this specific analytics rule. + - delete: Deletes this specific analytics rule. + +The AnalyticsRule class interacts with the Typesense API to manage operations on a +specific analytics rule. It provides methods to retrieve and delete individual rules. + +For more information on analytics, refer to the Analytics & Query Suggestion +[documentation](https://typesense.org/docs/27.0/api/analytics-query-suggestions.html) + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + import sys if sys.version_info >= (3, 11): @@ -13,20 +35,39 @@ ) -class AnalyticsRule(object): +class AnalyticsRule: + """ + Class for managing individual analytics rules in Typesense. + + This class provides methods to interact with a specific analytics rule, + including retrieving and deleting it. + + Attributes: + api_call (ApiCall): The API call object for making requests. + rule_id (str): The ID of the analytics rule. 
+ """ + def __init__(self, api_call: ApiCall, rule_id: str): + """ + Initialize the AnalyticsRule object. + + Args: + api_call (ApiCall): The API call object for making requests. + rule_id (str): The ID of the analytics rule. + """ self.api_call = api_call self.rule_id = rule_id - @property - def _endpoint_path(self) -> str: - from .analytics_rules import AnalyticsRules - - return "{0}/{1}".format(AnalyticsRules.RESOURCE_PATH, self.rule_id) - def retrieve( self, ) -> typing.Union[RuleSchemaForQueries, RuleSchemaForCounters]: + """ + Retrieve this specific analytics rule. + + Returns: + Union[RuleSchemaForQueries, RuleSchemaForCounters]: + The schema containing the rule details. + """ response: typing.Union[RuleSchemaForQueries, RuleSchemaForCounters] = ( self.api_call.get( self._endpoint_path, @@ -37,7 +78,27 @@ def retrieve( return response def delete(self) -> RuleDeleteSchema: + """ + Delete this specific analytics rule. + + Returns: + RuleDeleteSchema: The schema containing the deletion response. + """ response: RuleDeleteSchema = self.api_call.delete( - self._endpoint_path, entity_type=RuleDeleteSchema + self._endpoint_path, + entity_type=RuleDeleteSchema, ) + return response + + @property + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific analytics rule. + + Returns: + str: The constructed endpoint path. + """ + from typesense.analytics_rules import AnalyticsRules + + return "/".join([AnalyticsRules.resource_path, self.rule_id]) diff --git a/src/typesense/analytics_rules.py b/src/typesense/analytics_rules.py index 5874320..89f748a 100644 --- a/src/typesense/analytics_rules.py +++ b/src/typesense/analytics_rules.py @@ -1,3 +1,30 @@ +""" +This module provides functionality for managing analytics rules in Typesense. + +Classes: + - AnalyticsRules: Handles operations related to analytics rules. + +Methods: + - __init__: Initializes the AnalyticsRules object. 
+ - __getitem__: Retrieves or creates an AnalyticsRule object for a given rule_id. + - create: Creates a new analytics rule. + - upsert: Creates or updates an analytics rule. + - retrieve: Retrieves all analytics rules. + +Attributes: + - resource_path: The API resource path for analytics rules. + +The AnalyticsRules class interacts with the Typesense API to manage analytics rule operations. +It provides methods to create, update, and retrieve analytics rules, as well as access +individual AnalyticsRule objects. + +For more information on analytics, refer to the Analytics & Query Suggestion +[documentation](https://typesense.org/docs/27.0/api/analytics-query-suggestions.html) + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + import sys if sys.version_info >= (3, 11): @@ -5,45 +32,92 @@ else: import typing_extensions as typing +from typesense.analytics_rule import AnalyticsRule from typesense.api_call import ApiCall from typesense.types.analytics_rule import ( + RuleCreateSchemaForCounters, RuleCreateSchemaForQueries, RuleSchemaForCounters, RuleSchemaForQueries, RulesRetrieveSchema, ) -from .analytics_rule import AnalyticsRule +_RuleParams = typing.Union[ + typing.Dict[str, typing.Union[str, int, bool]], + None, +] class AnalyticsRules(object): - RESOURCE_PATH = "/analytics/rules" + """ + Class for managing analytics rules in Typesense. + + This class provides methods to interact with analytics rules, including + creating, updating, and retrieving them. + + Attributes: + resource_path (str): The API resource path for analytics rules. + api_call (ApiCall): The API call object for making requests. + rules (Dict[str, AnalyticsRule]): A dictionary of AnalyticsRule objects. + """ + + resource_path: typing.Final[str] = "/analytics/rules" def __init__(self, api_call: ApiCall): + """ + Initialize the AnalyticsRules object. 
+ + Args: + api_call (ApiCall): The API call object for making requests. + """ self.api_call = api_call self.rules: typing.Dict[str, AnalyticsRule] = {} def __getitem__(self, rule_id: str) -> AnalyticsRule: + """ + Get or create an AnalyticsRule object for a given rule_id. + + Args: + rule_id (str): The ID of the analytics rule. + + Returns: + AnalyticsRule: The AnalyticsRule object for the given ID. + """ if not self.rules.get(rule_id): self.rules[rule_id] = AnalyticsRule(self.api_call, rule_id) - return self.rules[rule_id] def create( self, - rule: typing.Union[RuleSchemaForCounters, RuleCreateSchemaForQueries], - params: typing.Union[ - typing.Dict[str, typing.Union[str, int, bool]], None - ] = None, - ) -> typing.Union[RuleSchemaForCounters, RuleCreateSchemaForQueries]: - response: typing.Union[RuleSchemaForCounters, RuleCreateSchemaForQueries] = ( + rule: typing.Union[RuleCreateSchemaForCounters, RuleCreateSchemaForQueries], + rule_parameters: _RuleParams = None, + ) -> typing.Union[RuleSchemaForCounters, RuleSchemaForQueries]: + """ + Create a new analytics rule. + + This method can create both counter rules and query rules. + + Args: + rule (Union[RuleCreateSchemaForCounters, RuleCreateSchemaForQueries]): + The rule schema. Use RuleCreateSchemaForCounters for counter rules + and RuleCreateSchemaForQueries for query rules. + + rule_parameters (_RuleParams, optional): Additional rule parameters. + + Returns: + Union[RuleSchemaForCounters, RuleSchemaForQueries]: + The created rule. Returns RuleSchemaForCounters for counter rules + and RuleSchemaForQueries for query rules. 
+ """ + response: typing.Union[RuleSchemaForCounters, RuleSchemaForQueries] = ( self.api_call.post( - AnalyticsRules.RESOURCE_PATH, + AnalyticsRules.resource_path, body=rule, - params=params, + params=rule_parameters, as_json=True, entity_type=typing.Union[ - RuleSchemaForCounters, RuleCreateSchemaForQueries + RuleSchemaForCounters, + RuleSchemaForQueries, ], ) ) @@ -51,21 +125,38 @@ def create( def upsert( self, - id: str, + rule_id: str, rule: typing.Union[RuleCreateSchemaForQueries, RuleSchemaForCounters], ) -> typing.Union[RuleSchemaForCounters, RuleCreateSchemaForQueries]: + """ + Create or update an analytics rule. + + Args: + rule_id (str): The ID of the rule to upsert. + rule (Union[RuleCreateSchemaForQueries, RuleSchemaForCounters]): The rule schema. + + Returns: + Union[RuleSchemaForCounters, RuleCreateSchemaForQueries]: The upserted rule. + """ response = self.api_call.put( - "{0}/{1}".format(AnalyticsRules.RESOURCE_PATH, id), + "/".join([AnalyticsRules.resource_path, rule_id]), body=rule, entity_type=typing.Union[RuleSchemaForQueries, RuleSchemaForCounters], ) return typing.cast( - typing.Union[RuleSchemaForCounters, RuleCreateSchemaForQueries], response + typing.Union[RuleSchemaForCounters, RuleCreateSchemaForQueries], + response, ) def retrieve(self) -> RulesRetrieveSchema: + """ + Retrieve all analytics rules. + + Returns: + RulesRetrieveSchema: The schema containing all analytics rules. 
+ """ response: RulesRetrieveSchema = self.api_call.get( - AnalyticsRules.RESOURCE_PATH, + AnalyticsRules.resource_path, as_json=True, entity_type=RulesRetrieveSchema, ) From 38f736358ff4a0e9736af7e973dd6bfa5b401d8b Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 9 Sep 2024 17:16:32 +0300 Subject: [PATCH 170/288] refactor(debug): format debug class based on linting rules --- src/typesense/debug.py | 54 +++++++++++++++++++++++++++++++++++++++--- tests/debug_test.py | 2 +- 2 files changed, 52 insertions(+), 4 deletions(-) diff --git a/src/typesense/debug.py b/src/typesense/debug.py index cd1f8c6..42e52ab 100644 --- a/src/typesense/debug.py +++ b/src/typesense/debug.py @@ -1,16 +1,64 @@ +""" +This module provides functionality for accessing debug information in Typesense. + +Classes: + - Debug: Handles operations related to retrieving debug information. + +Methods: + - __init__: Initializes the Debug object. + - retrieve: Retrieves debug information from the Typesense server. + +Attributes: + - RESOURCE_PATH: The API resource path for debug operations. + +The Debug class interacts with the Typesense API to fetch debug information, +which can be useful for troubleshooting and system monitoring. + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + +from typing import Final + from typesense.api_call import ApiCall from typesense.types.debug import DebugResponseSchema -class Debug(object): - RESOURCE_PATH = "/debug" +class Debug: + """ + Class for accessing debug information in Typesense. + + This class provides methods to retrieve debug information from the Typesense server, + which can be useful for system diagnostics and troubleshooting. + + Attributes: + RESOURCE_PATH (str): The API resource path for debug operations. + api_call (ApiCall): The API call object for making requests. 
+ """ + + resource_path: Final[str] = "/debug" def __init__(self, api_call: ApiCall) -> None: + """ + Initialize the Debug object. + + Args: + api_call (ApiCall): The API call object for making requests. + """ self.api_call = api_call def retrieve(self) -> DebugResponseSchema: + """ + Retrieve debug information from the Typesense server. + + This method sends a GET request to the debug endpoint and returns + the server's debug information. + + Returns: + DebugResponseSchema: A schema containing the debug information. + """ return self.api_call.get( - "{0}".format(Debug.RESOURCE_PATH), + Debug.resource_path, as_json=True, entity_type=DebugResponseSchema, ) diff --git a/tests/debug_test.py b/tests/debug_test.py index 942ec9a..5970b6e 100644 --- a/tests/debug_test.py +++ b/tests/debug_test.py @@ -22,7 +22,7 @@ def test_init(fake_api_call: ApiCall) -> None: debug.api_call.config.nearest_node, fake_api_call.config.nearest_node, ) - assert debug.RESOURCE_PATH == "/debug" # noqa: WPS437 + assert debug.resource_path == "/debug" # noqa: WPS437 def test_retrieve(fake_debug: Debug) -> None: From 5018c372f32219c35257a95a778ae1f2d28a7558 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 9 Sep 2024 17:22:15 +0300 Subject: [PATCH 171/288] refactor(errors): format errors class based on linting rules - Add type hints and docstrings to exception classes - Reorganize imports for better version handling - Remove redudant constructor override - Expand module-level documentation for clarity and context --- src/typesense/exceptions.py | 68 +++++++++++++++++++++++-------------- tests/import_test.py | 1 - 2 files changed, 42 insertions(+), 27 deletions(-) diff --git a/src/typesense/exceptions.py b/src/typesense/exceptions.py index 338228f..f1af518 100644 --- a/src/typesense/exceptions.py +++ b/src/typesense/exceptions.py @@ -1,65 +1,81 @@ -from __future__ import annotations - -import sys - -if sys.version_info >= (3, 11): - import typing -else: - import typing_extensions as 
typing +""" +This module defines custom exception classes for the Typesense client. + +Classes: + - TypesenseClientError: Base exception class for Typesense client errors. + - ConfigError: Raised when there is an error in the client configuration. + - Timeout: Raised when a request times out. + - RequestMalformed: Raised when a request's parameters are malformed. + - RequestUnauthorized: Raised when a request is unauthorized. + - RequestForbidden: Raised when a request is forbidden. + - ObjectNotFound: Raised when a resource is not found. + - ObjectAlreadyExists: Raised when a resource already exists. + - ObjectUnprocessable: Raised when a resource is unprocessable. + - ServerError: Raised when the server encounters an error. + - ServiceUnavailable: Raised when the service is unavailable. + - HTTPStatus0Error: Raised when the HTTP status code is 0. + - InvalidParameter: Raised when a parameter is invalid. + +These exception classes provide specific error types for various scenarios +that may occur when interacting with the Typesense API. + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" class TypesenseClientError(IOError): - def __init__( - self, - *args: object, - **kwargs: typing.Dict[typing.Any, typing.Any], - ) -> None: - super().__init__(*args, **kwargs) + """ + Base exception class for Typesense client errors. + + This class extends IOError and serves as the parent class for all + custom Typesense client exceptions. 
+ """ class ConfigError(TypesenseClientError): - pass + """Raised when there is an error in the client configuration.""" class Timeout(TypesenseClientError): - pass + """Raised when a request times out.""" class RequestMalformed(TypesenseClientError): - pass + """Raised when a request's parameters are malformed.""" class RequestUnauthorized(TypesenseClientError): - pass + """Raised when a request is unauthorized.""" class RequestForbidden(TypesenseClientError): - pass + """Raised when a request is forbidden.""" class ObjectNotFound(TypesenseClientError): - pass + """Raised when a resource is not found.""" class ObjectAlreadyExists(TypesenseClientError): - pass + """Raised when a resource already exists.""" class ObjectUnprocessable(TypesenseClientError): - pass + """Raised when a resource is unprocessable.""" class ServerError(TypesenseClientError): - pass + """Raised when the server encounters an error.""" class ServiceUnavailable(TypesenseClientError): - pass + """Raised when the service is unavailable.""" class HTTPStatus0Error(TypesenseClientError): - pass + """Raised when the HTTP status code is 0.""" class InvalidParameter(TypesenseClientError): - pass + """Raised when a parameter is invalid.""" diff --git a/tests/import_test.py b/tests/import_test.py index f767aa0..72992d5 100644 --- a/tests/import_test.py +++ b/tests/import_test.py @@ -35,7 +35,6 @@ "conversations_models", "document", "documents", - "exceptions", "keys", "multi_search", "overrides", From 07903a7c3eaf9f75ead0b4365b7bd703c628fbc1 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 9 Sep 2024 19:29:03 +0300 Subject: [PATCH 172/288] refactor(alias): format alias classes based on linting rules --- src/typesense/alias.py | 75 +++++++++++++++++++++++++++++---- src/typesense/aliases.py | 91 ++++++++++++++++++++++++++++++++++++---- 2 files changed, 151 insertions(+), 15 deletions(-) diff --git a/src/typesense/alias.py b/src/typesense/alias.py index c140434..6786d89 100644 --- 
a/src/typesense/alias.py +++ b/src/typesense/alias.py @@ -1,25 +1,84 @@ +""" +This module provides functionality for managing individual aliases in Typesense. + +Classes: + - Alias: Handles operations related to a specific alias. + +Methods: + - __init__: Initializes the Alias object. + - retrieve: Retrieves the details of this specific alias. + - delete: Deletes this specific alias. + - _endpoint_path: Constructs the API endpoint path for this specific alias. + +The Alias class interacts with the Typesense API to manage operations on a +specific alias. It provides methods to retrieve and delete individual aliases. + +For more information on collection aliases, refer to the Collection Alias +[documentation](https://typesense.org/docs/27.0/api/collection-alias.html#create-or-update-an-alias) + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + from typesense.api_call import ApiCall from typesense.types.alias import AliasSchema class Alias(object): + """ + Class for managing individual aliases in Typesense. + + This class provides methods to interact with a specific alias, + including retrieving and deleting it. + + Attributes: + api_call (ApiCall): The API call object for making requests. + name (str): The name of the alias. + """ + def __init__(self, api_call: ApiCall, name: str): + """ + Initialize the Alias object. + + Args: + api_call (ApiCall): The API call object for making requests. + name (str): The name of the alias. + """ self.api_call = api_call self.name = name - @property - def _endpoint_path(self) -> str: - from .aliases import Aliases - - return "{0}/{1}".format(Aliases.RESOURCE_PATH, self.name) - def retrieve(self) -> AliasSchema: + """ + Retrieve this specific alias. + + Returns: + AliasSchema: The schema containing the alias details. 
+ """ response: AliasSchema = self.api_call.get( - self._endpoint_path, entity_type=AliasSchema, as_json=True + self._endpoint_path, + entity_type=AliasSchema, + as_json=True, ) return response def delete(self) -> AliasSchema: - response = self.api_call.delete(self._endpoint_path, entity_type=AliasSchema) + """ + Delete this specific alias. + Returns: + AliasSchema: The schema containing the deletion response. + """ + response = self.api_call.delete(self._endpoint_path, entity_type=AliasSchema) return response + + @property + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific alias. + + Returns: + str: The constructed endpoint path. + """ + from typesense.aliases import Aliases + + return "/".join([Aliases.resource_path, self.name]) diff --git a/src/typesense/aliases.py b/src/typesense/aliases.py index dc94517..3d112b3 100644 --- a/src/typesense/aliases.py +++ b/src/typesense/aliases.py @@ -1,3 +1,30 @@ +""" +This module provides functionality for managing aliases in Typesense. + +Classes: + - Aliases: Handles operations related to aliases within a Typesense instance. + +Methods: + - __init__: Initializes the Aliases object. + - __getitem__: Retrieves or creates an Alias object for a given alias name. + - _endpoint_path: Constructs the API endpoint path for alias operations. + - upsert: Creates or updates an alias. + - retrieve: Retrieves all aliases. + +Attributes: + - RESOURCE_PATH: The API resource path for alias operations. + +The Aliases class interacts with the Typesense API to manage alias operations. +It provides methods to create, update, and retrieve aliases, as well as access +individual Alias objects. 
+ +For more information on collection aliases, refer to the Collection Alias +[documentation](https://typesense.org/docs/27.0/api/collection-alias.html#create-or-update-an-alias) + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + import sys from typesense.alias import Alias @@ -11,34 +38,84 @@ class Aliases: - RESOURCE_PATH = "/aliases" + """ + Class for managing aliases in Typesense. + + This class provides methods to interact with aliases, including + creating, updating, and retrieving them. + + Attributes: + RESOURCE_PATH (str): The API resource path for alias operations. + api_call (ApiCall): The API call object for making requests. + aliases (Dict[str, Alias]): A dictionary of Alias objects. + """ + + resource_path: typing.Final[str] = "/aliases" def __init__(self, api_call: ApiCall): + """ + Initialize the Aliases object. + + Args: + api_call (ApiCall): The API call object for making requests. + """ self.api_call = api_call self.aliases: typing.Dict[str, Alias] = {} def __getitem__(self, name: str) -> Alias: + """ + Get or create an Alias object for a given alias name. + + Args: + name (str): The name of the alias. + + Returns: + Alias: The Alias object for the given name. + """ if not self.aliases.get(name): self.aliases[name] = Alias(self.api_call, name) - return self.aliases.get(name) - def _endpoint_path(self, alias_name: str) -> str: - return "{0}/{1}".format(Aliases.RESOURCE_PATH, alias_name) - def upsert(self, name: str, mapping: AliasCreateSchema) -> AliasSchema: + """ + Create or update an alias. + + Args: + name (str): The name of the alias. + mapping (AliasCreateSchema): The schema for creating or updating the alias. + + Returns: + AliasSchema: The created or updated alias. 
+ """ response: AliasSchema = self.api_call.put( self._endpoint_path(name), body=mapping, entity_type=AliasSchema, ) - return response def retrieve(self) -> AliasesResponseSchema: + """ + Retrieve all aliases. + + Returns: + AliasesResponseSchema: The schema containing all aliases. + """ response: AliasesResponseSchema = self.api_call.get( - Aliases.RESOURCE_PATH, + Aliases.resource_path, as_json=True, entity_type=AliasesResponseSchema, ) return response + + def _endpoint_path(self, alias_name: str) -> str: + """ + Construct the API endpoint path for alias operations. + + Args: + alias_name (str): The name of the alias. + + Returns: + str: The constructed endpoint path. + """ + return "/".join([Aliases.resource_path, alias_name]) From 24dec9924b9b1115d6c65cbac2cf55ce353aeb5f Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 9 Sep 2024 19:59:26 +0300 Subject: [PATCH 173/288] refactor(convo-model): format convo model classes based on linting rules --- src/typesense/conversation_model.py | 84 ++++++++++++++++++++++++--- src/typesense/conversations_models.py | 82 ++++++++++++++++++++++++-- 2 files changed, 154 insertions(+), 12 deletions(-) diff --git a/src/typesense/conversation_model.py b/src/typesense/conversation_model.py index 7da67cc..5e26e4c 100644 --- a/src/typesense/conversation_model.py +++ b/src/typesense/conversation_model.py @@ -1,3 +1,27 @@ +""" +This module provides functionality for managing individual conversation models in Typesense. + +Classes: + - ConversationModel: Handles operations related to a specific conversation model. + +Methods: + - __init__: Initializes the ConversationModel object. + - _endpoint_path: Constructs the API endpoint path for this specific conversation model. + - retrieve: Retrieves the details of this specific conversation model. + - update: Updates this specific conversation model. + - delete: Deletes this specific conversation model. 
+ +The ConversationModel class interacts with the Typesense API to manage operations on a +specific conversation model. It provides methods to retrieve, update, +and delete individual models. + +For more information on conversation models and RAG, refer to the Conversational Search +[documentation](https://typesense.org/docs/27.0/api/conversational-search-rag.html) + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + from typesense.api_call import ApiCall from typesense.types.conversations_model import ( ConversationModelCreateSchema, @@ -6,18 +30,36 @@ ) -class ConversationModel(object): +class ConversationModel: + """ + Class for managing individual conversation models in Typesense. + + This class provides methods to interact with a specific conversation model, + including retrieving, updating, and deleting it. + + Attributes: + model_id (str): The ID of the conversation model. + api_call (ApiCall): The API call object for making requests. + """ + def __init__(self, api_call: ApiCall, model_id: str) -> None: + """ + Initialize the ConversationModel object. + + Args: + api_call (ApiCall): The API call object for making requests. + model_id (str): The ID of the conversation model. + """ self.model_id = model_id self.api_call = api_call - @property - def _endpoint_path(self) -> str: - from .conversations_models import ConversationsModels - - return "{0}/{1}".format(ConversationsModels.RESOURCE_PATH, self.model_id) - def retrieve(self) -> ConversationModelSchema: + """ + Retrieve this specific conversation model. + + Returns: + ConversationModelSchema: The schema containing the conversation model details. 
+ """ response = self.api_call.get( self._endpoint_path, as_json=True, @@ -26,6 +68,16 @@ def retrieve(self) -> ConversationModelSchema: return response def update(self, model: ConversationModelCreateSchema) -> ConversationModelSchema: + """ + Update this specific conversation model. + + Args: + model (ConversationModelCreateSchema): + The schema containing the updated model details. + + Returns: + ConversationModelSchema: The schema containing the updated conversation model. + """ response: ConversationModelSchema = self.api_call.put( self._endpoint_path, body=model, @@ -34,8 +86,26 @@ def update(self, model: ConversationModelCreateSchema) -> ConversationModelSchem return response def delete(self) -> ConversationModelDeleteSchema: + """ + Delete this specific conversation model. + + Returns: + ConversationModelDeleteSchema: The schema containing the deletion response. + """ response: ConversationModelDeleteSchema = self.api_call.delete( self._endpoint_path, entity_type=ConversationModelDeleteSchema, ) return response + + @property + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific conversation model. + + Returns: + str: The constructed endpoint path. + """ + from typesense.conversations_models import ConversationsModels + + return "/".join([ConversationsModels.resource_path, self.model_id]) diff --git a/src/typesense/conversations_models.py b/src/typesense/conversations_models.py index 785b8ac..c333ccb 100644 --- a/src/typesense/conversations_models.py +++ b/src/typesense/conversations_models.py @@ -1,3 +1,32 @@ +""" +This module provides functionality for managing conversation models in Typesense. + +Classes: + - ConversationsModels: Handles operations related to conversation models. + +Methods: + - __init__: Initializes the ConversationsModels object. + - __getitem__: Retrieves or creates a ConversationModel object for a given model_id. + - create: Creates a new conversation model. 
+ - retrieve: Retrieves all conversation models. + +Attributes: + - resource_path: The API resource path for conversation models operations. + +The ConversationsModels class interacts with the Typesense API to manage +conversation model operations. + +It provides methods to create and retrieve conversation models, as well as access +individual ConversationModel objects. + + +For more information on conversation models and RAG, refer to the Conversational Search +[documentation](https://typesense.org/docs/27.0/api/conversational-search-rag.html) + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + import sys from typesense.api_call import ApiCall @@ -11,28 +40,65 @@ else: import typing_extensions as typing -from .conversation_model import ConversationModel +from typesense.conversation_model import ConversationModel class ConversationsModels(object): - RESOURCE_PATH = "/conversations/models" + """ + Class for managing conversation models in Typesense. + + This class provides methods to interact with conversation models, including + creating, retrieving, and accessing individual models. + + Attributes: + resource_path (str): The API resource path for conversation models operations. + api_call (ApiCall): The API call object for making requests. + conversations_models (Dict[str, ConversationModel]): + A dictionary of ConversationModel objects. + """ + + resource_path: typing.Final[str] = "/conversations/models" def __init__(self, api_call: ApiCall) -> None: + """ + Initialize the ConversationsModels object. + + Args: + api_call (ApiCall): The API call object for making requests. + """ self.api_call = api_call self.conversations_models: typing.Dict[str, ConversationModel] = {} def __getitem__(self, model_id: str) -> ConversationModel: + """ + Get or create a ConversationModel object for a given model_id. 
+ + Args: + model_id (str): The ID of the conversation model. + + Returns: + ConversationModel: The ConversationModel object for the given ID. + """ if model_id not in self.conversations_models: self.conversations_models[model_id] = ConversationModel( self.api_call, model_id, ) - return self.conversations_models[model_id] def create(self, model: ConversationModelCreateSchema) -> ConversationModelSchema: + """ + Create a new conversation model. + + Args: + model (ConversationModelCreateSchema): + The schema for creating the conversation model. + + Returns: + ConversationModelSchema: The created conversation model. + """ response = self.api_call.post( - endpoint=ConversationsModels.RESOURCE_PATH, + endpoint=ConversationsModels.resource_path, entity_type=ConversationModelSchema, as_json=True, body=model, @@ -40,8 +106,14 @@ def create(self, model: ConversationModelCreateSchema) -> ConversationModelSchem return response def retrieve(self) -> typing.List[ConversationModelSchema]: + """ + Retrieve all conversation models. + + Returns: + List[ConversationModelSchema]: A list of all conversation models. 
+ """ response: typing.List[ConversationModelSchema] = self.api_call.get( - endpoint=ConversationsModels.RESOURCE_PATH, + endpoint=ConversationsModels.resource_path, entity_type=typing.List[ConversationModelSchema], as_json=True, ) From 9b8ca5535a8730fba264fb97af99c7be2734e9f3 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 9 Sep 2024 20:20:36 +0300 Subject: [PATCH 174/288] refactor(stopwords): format stopword classes based on linting rules --- src/typesense/stopwords.py | 85 ++++++++++++++++++++++++++++++---- src/typesense/stopwords_set.py | 77 ++++++++++++++++++++++++++---- 2 files changed, 146 insertions(+), 16 deletions(-) diff --git a/src/typesense/stopwords.py b/src/typesense/stopwords.py index 86db41c..8fede18 100644 --- a/src/typesense/stopwords.py +++ b/src/typesense/stopwords.py @@ -1,47 +1,116 @@ +""" +This module provides functionality for managing stopwords in Typesense. + +Classes: + - Stopwords: Handles operations related to stopwords and stopword sets. + +Methods: + - __init__: Initializes the Stopwords object. + - __getitem__: Retrieves or creates a StopwordsSet object for a given stopwords_set_id. + - upsert: Creates or updates a stopwords set. + - retrieve: Retrieves all stopwords sets. + +Attributes: + - RESOURCE_PATH: The API resource path for stopwords operations. + +The Stopwords class interacts with the Typesense API to manage stopwords operations. +It provides methods to create, update, and retrieve stopwords sets, as well as access +individual StopwordsSet objects. + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. 
+""" + import sys from typesense.api_call import ApiCall +from typesense.stopwords_set import StopwordsSet from typesense.types.stopword import ( StopwordCreateSchema, StopwordSchema, StopwordsRetrieveSchema, ) -from .stopwords_set import StopwordsSet - if sys.version_info >= (3, 11): import typing else: import typing_extensions as typing -class Stopwords(object): - RESOURCE_PATH = "/stopwords" +class Stopwords: + """ + Class for managing stopwords in Typesense. + + This class provides methods to interact with stopwords and stopwords sets, including + creating, updating, retrieving, and accessing individual stopwords sets. + + Attributes: + RESOURCE_PATH (str): The API resource path for stopwords operations. + api_call (ApiCall): The API call object for making requests. + stopwords_sets (Dict[str, StopwordsSet]): A dictionary of StopwordsSet objects. + """ + + resource_path: typing.Final[str] = "/stopwords" def __init__(self, api_call: ApiCall): + """ + Initialize the Stopwords object. + + Args: + api_call (ApiCall): The API call object for making requests. + """ self.api_call = api_call self.stopwords_sets: typing.Dict[str, StopwordsSet] = {} def __getitem__(self, stopwords_set_id: str) -> StopwordsSet: + """ + Get or create a StopwordsSet object for a given stopwords_set_id. + + Args: + stopwords_set_id (str): The ID of the stopwords set. + + Returns: + StopwordsSet: The StopwordsSet object for the given ID. + """ if not self.stopwords_sets.get(stopwords_set_id): self.stopwords_sets[stopwords_set_id] = StopwordsSet( - self.api_call, stopwords_set_id + self.api_call, + stopwords_set_id, ) return self.stopwords_sets[stopwords_set_id] def upsert( - self, stopwords_set_id: str, stopwords_set: StopwordCreateSchema + self, + stopwords_set_id: str, + stopwords_set: StopwordCreateSchema, ) -> StopwordSchema: + """ + Create or update a stopwords set. + + Args: + stopwords_set_id (str): The ID of the stopwords set to upsert. 
+ stopwords_set (StopwordCreateSchema): + The schema for creating or updating the stopwords set. + + Returns: + StopwordSchema: The created or updated stopwords set. + """ response: StopwordSchema = self.api_call.put( - "{}/{}".format(Stopwords.RESOURCE_PATH, stopwords_set_id), + "/".join([Stopwords.resource_path, stopwords_set_id]), body=stopwords_set, entity_type=StopwordSchema, ) return response def retrieve(self) -> StopwordsRetrieveSchema: + """ + Retrieve all stopwords sets. + + Returns: + StopwordsRetrieveSchema: The schema containing all stopwords sets. + """ response: StopwordsRetrieveSchema = self.api_call.get( - "{0}".format(Stopwords.RESOURCE_PATH), + Stopwords.resource_path, as_json=True, entity_type=StopwordsRetrieveSchema, ) diff --git a/src/typesense/stopwords_set.py b/src/typesense/stopwords_set.py index ef3a7e6..dcc68e4 100644 --- a/src/typesense/stopwords_set.py +++ b/src/typesense/stopwords_set.py @@ -1,26 +1,87 @@ +""" +This module provides functionality for managing individual stopwords sets in Typesense. + +Classes: + - StopwordsSet: Handles operations related to a specific stopwords set. + +Methods: + - __init__: Initializes the StopwordsSet object. + - retrieve: Retrieves the details of this specific stopwords set. + - delete: Deletes this specific stopwords set. + - _endpoint_path: Constructs the API endpoint path for this specific stopwords set. + +The StopwordsSet class interacts with the Typesense API to manage operations on a +specific stopwords set. It provides methods to retrieve and delete individual stopwords sets. + +For more information regarding Stopwords, refer to the Stopwords [documentation] +(https://typesense.org/docs/27.0/api/stopwords.html). + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. 
+""" + from typesense.api_call import ApiCall from typesense.types.stopword import StopwordDeleteSchema, StopwordsSingleRetrieveSchema class StopwordsSet: + """ + Class for managing individual stopwords sets in Typesense. + + This class provides methods to interact with a specific stopwords set, + including retrieving and deleting it. + + Attributes: + stopwords_set_id (str): The ID of the stopwords set. + api_call (ApiCall): The API call object for making requests. + """ + def __init__(self, api_call: ApiCall, stopwords_set_id: str) -> None: + """ + Initialize the StopwordsSet object. + + Args: + api_call (ApiCall): The API call object for making requests. + stopwords_set_id (str): The ID of the stopwords set. + """ self.stopwords_set_id = stopwords_set_id self.api_call = api_call - @property - def _endpoint_path(self) -> str: - from .stopwords import Stopwords - - return "{0}/{1}".format(Stopwords.RESOURCE_PATH, self.stopwords_set_id) - def retrieve(self) -> StopwordsSingleRetrieveSchema: + """ + Retrieve this specific stopwords set. + + Returns: + StopwordsSingleRetrieveSchema: The schema containing the stopwords set details. + """ response: StopwordsSingleRetrieveSchema = self.api_call.get( - self._endpoint_path, as_json=True, entity_type=StopwordsSingleRetrieveSchema + self._endpoint_path, + as_json=True, + entity_type=StopwordsSingleRetrieveSchema, ) return response def delete(self) -> StopwordDeleteSchema: + """ + Delete this specific stopwords set. + + Returns: + StopwordDeleteSchema: The schema containing the deletion response. + """ response: StopwordDeleteSchema = self.api_call.delete( - self._endpoint_path, entity_type=StopwordDeleteSchema + self._endpoint_path, + entity_type=StopwordDeleteSchema, ) return response + + @property + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific stopwords set. + + Returns: + str: The constructed endpoint path. 
+ """ + from typesense.stopwords import Stopwords + + return "/".join([Stopwords.resource_path, self.stopwords_set_id]) From 09494fdf17a3d4f3ba30a60819eaf76c13a6f946 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 9 Sep 2024 20:24:28 +0300 Subject: [PATCH 175/288] refactor(logger): format logger module based on linting rules --- src/typesense/logger.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/typesense/logger.py b/src/typesense/logger.py index 0c54f7a..1be7890 100644 --- a/src/typesense/logger.py +++ b/src/typesense/logger.py @@ -1,3 +1,5 @@ +"""Logging configuration for the Typesense Python client.""" + import logging logger = logging.getLogger("typesense") From ff05eaa89f453745ea6187d845283e6441a2194b Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 9 Sep 2024 20:29:48 +0300 Subject: [PATCH 176/288] refactor(api-key): format api key classes based on linting rules --- src/typesense/key.py | 80 ++++++++++++++++++++++++++----- src/typesense/keys.py | 108 +++++++++++++++++++++++++++++++++++++----- 2 files changed, 164 insertions(+), 24 deletions(-) diff --git a/src/typesense/key.py b/src/typesense/key.py index 12d3ab6..1a3802d 100644 --- a/src/typesense/key.py +++ b/src/typesense/key.py @@ -1,26 +1,84 @@ +""" +This module provides functionality for managing individual API keys in Typesense. + +Classes: + - Key: Handles operations related to a specific API key. + +Methods: + - __init__: Initializes the Key object. + - _endpoint_path: Constructs the API endpoint path for this specific key. + - retrieve: Retrieves the details of this specific API key. + - delete: Deletes this specific API key. + +The Key class interacts with the Typesense API to manage operations on a +specific API key. It provides methods to retrieve and delete individual keys. + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. 
+""" + from typesense.api_call import ApiCall from typesense.types.key import ApiKeyDeleteSchema, ApiKeySchema -class Key(object): +class Key: + """ + Class for managing individual API keys in Typesense. + + This class provides methods to interact with a specific API key, + including retrieving and deleting it. + + Attributes: + key_id (int): The ID of the API key. + api_call (ApiCall): The API call object for making requests. + """ + def __init__(self, api_call: ApiCall, key_id: int) -> None: + """ + Initialize the Key object. + + Args: + api_call (ApiCall): The API call object for making requests. + key_id (int): The ID of the API key. + """ self.key_id = key_id self.api_call = api_call - @property - def _endpoint_path(self) -> str: - from .keys import Keys - - return "{0}/{1}".format(Keys.RESOURCE_PATH, self.key_id) - def retrieve(self) -> ApiKeySchema: + """ + Retrieve this specific API key. + + Returns: + ApiKeySchema: The schema containing the API key details. + """ response: ApiKeySchema = self.api_call.get( - self._endpoint_path, as_json=True, entity_type=ApiKeySchema + self._endpoint_path, + as_json=True, + entity_type=ApiKeySchema, ) return response def delete(self) -> ApiKeyDeleteSchema: - resposne: ApiKeyDeleteSchema = self.api_call.delete( - self._endpoint_path, entity_type=ApiKeyDeleteSchema + """ + Delete this specific API key. + + Returns: + ApiKeyDeleteSchema: The schema containing the deletion response. + """ + response: ApiKeyDeleteSchema = self.api_call.delete( + self._endpoint_path, + entity_type=ApiKeyDeleteSchema, ) - return resposne + return response + + @property + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific API key. + + Returns: + str: The constructed endpoint path. 
+ """ + from typesense.keys import Keys + + return "/".join([Keys.resource_path, str(self.key_id)]) diff --git a/src/typesense/keys.py b/src/typesense/keys.py index a183be5..1a78695 100644 --- a/src/typesense/keys.py +++ b/src/typesense/keys.py @@ -1,11 +1,35 @@ +""" +This module provides functionality for managing API keys in Typesense. + +Classes: + - Keys: Handles operations related to API keys. + +Methods: + - __init__: Initializes the Keys object. + - __getitem__: Retrieves or creates a Key object for a given key_id. + - create: Creates a new API key. + - generate_scoped_search_key: Generates a scoped search key. + - retrieve: Retrieves all API keys. + +Attributes: + - resource_path: The API resource path for key operations. + +The Keys class interacts with the Typesense API to manage API key operations. +It provides methods to create, retrieve, and generate scoped search keys, as well as +access individual Key objects. + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + import base64 import hashlib import hmac import json import sys -from typesense import key from typesense.api_call import ApiCall +from typesense.key import Key from typesense.types.document import GenerateScopedSearchKeyParams from typesense.types.key import ( ApiKeyCreateResponseSchema, @@ -14,51 +38,109 @@ ApiKeySchema, ) -from .key import Key - if sys.version_info >= (3, 11): import typing else: import typing_extensions as typing -class Keys(object): - RESOURCE_PATH = "/keys" +class Keys: + """ + Class for managing API keys in Typesense. + + This class provides methods to interact with API keys, including + creating, retrieving, and generating scoped search keys. + + Attributes: + resource_path (str): The API resource path for key operations. + api_call (ApiCall): The API call object for making requests. + keys (Dict[int, Key]): A dictionary of Key objects. 
+ """ + + resource_path: typing.Final[str] = "/keys" def __init__(self, api_call: ApiCall) -> None: + """ + Initialize the Keys object. + + Args: + api_call (ApiCall): The API call object for making requests. + """ self.api_call = api_call self.keys: typing.Dict[int, Key] = {} def __getitem__(self, key_id: int) -> Key: + """ + Get or create a Key object for a given key_id. + + Args: + key_id (int): The ID of the API key. + + Returns: + Key: The Key object for the given ID. + """ if not self.keys.get(key_id): self.keys[key_id] = Key(self.api_call, key_id) - return self.keys[key_id] def create(self, schema: ApiKeyCreateSchema) -> ApiKeyCreateResponseSchema: + """ + Create a new API key. + + Args: + schema (ApiKeyCreateSchema): The schema for creating the API key. + + Returns: + ApiKeyCreateResponseSchema: The created API key. + """ response: ApiKeySchema = self.api_call.post( - Keys.RESOURCE_PATH, as_json=True, body=schema, entity_type=ApiKeySchema + Keys.resource_path, + as_json=True, + body=schema, + entity_type=ApiKeySchema, ) return response def generate_scoped_search_key( - self, search_key: str, parameters: GenerateScopedSearchKeyParams + self, + search_key: str, + key_parameters: GenerateScopedSearchKeyParams, ) -> bytes: - # Note: only a key generated with the `documents:search` action will be accepted by the server - params_str = json.dumps(parameters) + """ + Generate a scoped search key. + + Note: only a key generated with the `documents:search` + action will be accepted by the server. + + Args: + search_key (str): The search key to use as a base. + key_parameters (GenerateScopedSearchKeyParams): Parameters for the scoped key. + + Returns: + bytes: The generated scoped search key. 
+ """ + params_str = json.dumps(key_parameters) digest = base64.b64encode( hmac.new( search_key.encode("utf-8"), params_str.encode("utf-8"), digestmod=hashlib.sha256, - ).digest() + ).digest(), ) key_prefix = search_key[0:4] - raw_scoped_key = "{}{}{}".format(digest.decode("utf-8"), key_prefix, params_str) + raw_scoped_key = f"{digest.decode('utf-8')}{key_prefix}{params_str}" return base64.b64encode(raw_scoped_key.encode("utf-8")) def retrieve(self) -> ApiKeyRetrieveSchema: + """ + Retrieve all API keys. + + Returns: + ApiKeyRetrieveSchema: The schema containing all API keys. + """ response: ApiKeyRetrieveSchema = self.api_call.get( - Keys.RESOURCE_PATH, entity_type=ApiKeyRetrieveSchema, as_json=True + Keys.resource_path, + entity_type=ApiKeyRetrieveSchema, + as_json=True, ) return response From 9cbabeb36b77fde4a2ca5d0c471f12ee2c274234 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 9 Sep 2024 20:31:35 +0300 Subject: [PATCH 177/288] fix: remove redudant subscript slice from key gen --- src/typesense/keys.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/typesense/keys.py b/src/typesense/keys.py index 1a78695..2f21fca 100644 --- a/src/typesense/keys.py +++ b/src/typesense/keys.py @@ -127,7 +127,7 @@ def generate_scoped_search_key( digestmod=hashlib.sha256, ).digest(), ) - key_prefix = search_key[0:4] + key_prefix = search_key[:4] raw_scoped_key = f"{digest.decode('utf-8')}{key_prefix}{params_str}" return base64.b64encode(raw_scoped_key.encode("utf-8")) From 9284f1ebd215f52559bdadfa7c44f19c251ef2a9 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 9 Sep 2024 21:26:29 +0300 Subject: [PATCH 178/288] refactor(document): format document classes based on linting rules --- src/typesense/document.py | 129 +++++++++++---- src/typesense/documents.py | 323 +++++++++++++++++++++++++++++-------- 2 files changed, 352 insertions(+), 100 deletions(-) diff --git a/src/typesense/document.py b/src/typesense/document.py index 
9cae14f..7142170 100644 --- a/src/typesense/document.py +++ b/src/typesense/document.py @@ -1,14 +1,28 @@ +""" +This module provides functionality for managing individual documents in Typesense collections. + +Classes: + - Document: Handles operations related to a specific document within a collection. + +Methods: + - __init__: Initializes the Document object. + - _endpoint_path: Constructs the API endpoint path for this specific document. + - retrieve: Retrieves the details of this specific document. + - update: Updates this specific document. + - delete: Deletes this specific document. + +The Document class interacts with the Typesense API to manage operations on a +specific document within a collection. It provides methods to retrieve, update, +and delete individual documents. + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + import sys from typesense.api_call import ApiCall -from typesense.configuration import Configuration -from typesense.types.collection import CollectionSchema -from typesense.types.document import ( - DeleteQueryParameters, - DirtyValuesParameters, - DocumentSchema, - DocumentWriteParameters, -) +from typesense.types.document import DirtyValuesParameters, DocumentSchema if sys.version_info >= (3, 11): import typing @@ -19,48 +33,103 @@ class Document(typing.Generic[TDoc]): + """ + Class for managing individual documents in a Typesense collection. + + This class provides methods to interact with a specific document, + including retrieving, updating, and deleting it. + + Attributes: + api_call (ApiCall): The API call object for making requests. + collection_name (str): The name of the collection. + document_id (str): The ID of the document. 
+ """ + def __init__( - self, api_call: ApiCall, collection_name: str, document_id: str + self, + api_call: ApiCall, + collection_name: str, + document_id: str, ) -> None: + """ + Initialize the Document object. + + Args: + api_call (ApiCall): The API call object for making requests. + collection_name (str): The name of the collection. + document_id (str): The ID of the document. + """ self.api_call = api_call self.collection_name = collection_name self.document_id = document_id - @property - def _endpoint_path(self) -> str: - from .collections import Collections - from .documents import Documents - - return "{0}/{1}/{2}/{3}".format( - Collections.RESOURCE_PATH, - self.collection_name, - Documents.RESOURCE_PATH, - self.document_id, - ) - def retrieve(self) -> TDoc: - response = self.api_call.get( + """ + Retrieve this specific document. + + Returns: + TDoc: The retrieved document. + """ + response: TDoc = self.api_call.get( endpoint=self._endpoint_path, entity_type=typing.Dict[str, str], as_json=True, ) - - return typing.cast(TDoc, response) + return response def update( - self, document: TDoc, params: typing.Union[DirtyValuesParameters, None] = None + self, + document: TDoc, + dirty_values_parameters: typing.Union[DirtyValuesParameters, None] = None, ) -> TDoc: + """ + Update this specific document. + + Args: + document (TDoc): The updated document data. + dirty_values_parameters (Union[DirtyValuesParameters, None], optional): + Parameters for handling dirty values. + + Returns: + TDoc: The updated document. + """ response = self.api_call.patch( self._endpoint_path, body=document, - params=params, + params=dirty_values_parameters, entity_type=typing.Dict[str, str], ) - return typing.cast(TDoc, response) def delete(self) -> TDoc: - response = self.api_call.delete( - self._endpoint_path, entity_type=typing.Dict[str, str] + """ + Delete this specific document. + + Returns: + TDoc: The deleted document. 
+ """ + response: TDoc = self.api_call.delete( + self._endpoint_path, + entity_type=typing.Dict[str, str], + ) + return response + + @property + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific document. + + Returns: + str: The constructed endpoint path. + """ + from typesense.collections import Collections + from typesense.documents import Documents + + return "/".join( + [ + Collections.RESOURCE_PATH, + self.collection_name, + Documents.resource_path, + self.document_id, + ], ) - return typing.cast(TDoc, response) diff --git a/src/typesense/documents.py b/src/typesense/documents.py index 6ed2e3a..9ff1683 100644 --- a/src/typesense/documents.py +++ b/src/typesense/documents.py @@ -1,9 +1,42 @@ -# mypy: disable-error-code="misc" +""" +This module provides functionality for managing documents in Typesense collections. + +Classes: + - Documents: Handles operations related to documents within a collection. + +Methods: + - __init__: Initializes the Documents object. + - __getitem__: Retrieves or creates a Document object for a given document_id. + - _endpoint_path: Constructs the API endpoint path for document operations. + - create: Creates a new document in the collection. + - create_many: (Deprecated) Creates multiple documents in the collection. + - upsert: Creates or updates a document in the collection. + - update: Updates a document in the collection. + - import_jsonl: (Deprecated) Imports documents from a JSONL string. + - import_: Imports documents into the collection. + - export: Exports documents from the collection. + - search: Searches for documents in the collection. + - delete: Deletes documents from the collection based on given parameters. + +Attributes: + - resource_path: The API resource path for document operations. + +The Documents class interacts with the Typesense API to manage document operations +within a specific collection. 
It provides methods to create, update, import, export, +search, and delete documents. + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + import json import sys from typesense.api_call import ApiCall +from typesense.document import Document from typesense.exceptions import TypesenseClientError +from typesense.logger import logger +from typesense.preprocess import stringify_search_params from typesense.types.document import ( DeleteQueryParameters, DeleteResponse, @@ -27,10 +60,8 @@ UpdateByFilterResponse, ) -from .document import Document -from .logger import logger -from .preprocess import stringify_search_params -from .validation import validate_search +# mypy: disable-error-code="misc" + if sys.version_info >= (3, 11): import typing @@ -39,86 +70,169 @@ TDoc = typing.TypeVar("TDoc", bound=DocumentSchema) +_ImportParameters = typing.Union[ + DocumentImportParameters, + None, +] + class Documents(typing.Generic[TDoc]): - RESOURCE_PATH = "documents" + """ + Class for managing documents in a Typesense collection. + + This class provides methods to interact with documents, including + creating, updating, importing, exporting, searching, and deleting them. + + Attributes: + resource_path (str): The API resource path for document operations. + api_call (ApiCall): The API call object for making requests. + collection_name (str): The name of the collection. + documents (Dict[str, Document[TDoc]]): A dictionary of Document objects. + """ + + resource_path: typing.Final[str] = "documents" def __init__(self, api_call: ApiCall, collection_name: str) -> None: + """ + Initialize the Documents object. + + Args: + api_call (ApiCall): The API call object for making requests. + collection_name (str): The name of the collection. 
+ """ self.api_call = api_call self.collection_name = collection_name self.documents: typing.Dict[str, Document[TDoc]] = {} def __getitem__(self, document_id: str) -> Document[TDoc]: + """ + Get or create a Document object for a given document_id. + + Args: + document_id (str): The ID of the document. + + Returns: + Document[TDoc]: The Document object for the given ID. + """ if document_id not in self.documents: self.documents[document_id] = Document( - self.api_call, self.collection_name, document_id + self.api_call, + self.collection_name, + document_id, ) return self.documents[document_id] - def _endpoint_path(self, action: typing.Union[str, None] = None) -> str: - from .collections import Collections - - action = action or "" - return "{0}/{1}/{2}/{3}".format( - Collections.RESOURCE_PATH, - self.collection_name, - Documents.RESOURCE_PATH, - action, - ) - def create( - self, document: TDoc, params: typing.Union[DirtyValuesParameters, None] = None + self, + document: TDoc, + dirty_values_parameters: typing.Union[DirtyValuesParameters, None] = None, ) -> TDoc: - params = params or {} - params["action"] = "create" - response = self.api_call.post( + """ + Create a new document in the collection. + + Args: + document (TDoc): The document to create. + dirty_values_parameters (Union[DirtyValuesParameters, None], optional): + Parameters for handling dirty values. + + Returns: + TDoc: The created document. 
+ """ + dirty_values_parameters = dirty_values_parameters or {} + dirty_values_parameters["action"] = "create" + response: TDoc = self.api_call.post( self._endpoint_path(), body=document, - params=params, + params=dirty_values_parameters, as_json=True, entity_type=typing.Dict[str, str], ) - return typing.cast(TDoc, response) + return response def create_many( self, documents: typing.List[TDoc], - params: typing.Union[DirtyValuesParameters, None] = None, + dirty_values_parameters: typing.Union[DirtyValuesParameters, None] = None, ) -> typing.List[typing.Union[ImportResponseSuccess, ImportResponseFail[TDoc]]]: - logger.warning("`create_many` is deprecated: please use `import_`.") - return self.import_(documents, params) + """ + Create multiple documents in the collection. + + Args: + documents (List[TDoc]): The list of documents to create. + dirty_values_parameters (Union[DirtyValuesParameters, None], optional): + Parameters for handling dirty values. + + Returns: + List[Union[ImportResponseSuccess, ImportResponseFail[TDoc]]]: + The list of import responses. + """ + logger.warn("`create_many` is deprecated: please use `import_`.") + return self.import_(documents, dirty_values_parameters) def upsert( - self, document: TDoc, params: typing.Union[DirtyValuesParameters, None] = None + self, + document: TDoc, + dirty_values_parameters: typing.Union[DirtyValuesParameters, None] = None, ) -> TDoc: - params = params or {} - params["action"] = "upsert" - response = self.api_call.post( + """ + Create or update a document in the collection. + + Args: + document (TDoc): The document to upsert. + dirty_values_parameters (Union[DirtyValuesParameters, None], optional): + Parameters for handling dirty values. + + Returns: + TDoc: The upserted document. 
+ """ + dirty_values_parameters = dirty_values_parameters or {} + dirty_values_parameters["action"] = "upsert" + response: TDoc = self.api_call.post( self._endpoint_path(), body=document, - params=params, + params=dirty_values_parameters, as_json=True, entity_type=typing.Dict[str, str], ) - return typing.cast(TDoc, response) + return response def update( self, document: TDoc, - params: typing.Union[UpdateByFilterParameters, None] = None, + dirty_values_parameters: typing.Union[UpdateByFilterParameters, None] = None, ) -> UpdateByFilterResponse: - params = params or {} - params["action"] = "update" + """ + Update a document in the collection. + + Args: + document (TDoc): The document to update. + dirty_values_parameters (Union[UpdateByFilterParameters, None], optional): + Parameters for handling dirty values and filtering. + + Returns: + UpdateByFilterResponse: The response containing information about the update. + """ + dirty_values_parameters = dirty_values_parameters or {} + dirty_values_parameters["action"] = "update" response: UpdateByFilterResponse = self.api_call.patch( self._endpoint_path(), body=document, - params=params, + params=dirty_values_parameters, entity_type=UpdateByFilterResponse, ) return response def import_jsonl(self, documents_jsonl: str) -> str: + """ + Import documents from a JSONL string. + + Args: + documents_jsonl (str): The JSONL string containing documents to import. + + Returns: + str: The import response as a string. 
+ """ logger.warning("`import_jsonl` is deprecated: please use `import_`.") return self.import_(documents_jsonl) @@ -126,7 +240,7 @@ def import_jsonl(self, documents_jsonl: str) -> str: def import_( self, documents: typing.List[TDoc], - params: DocumentImportParametersReturnDocAndId, + import_parameters: DocumentImportParametersReturnDocAndId, batch_size: typing.Union[int, None] = None, ) -> typing.List[ typing.Union[ImportResponseWithDocAndId[TDoc], ImportResponseFail[TDoc]] @@ -136,7 +250,7 @@ def import_( def import_( self, documents: typing.List[TDoc], - params: DocumentImportParametersReturnId, + import_parameters: DocumentImportParametersReturnId, batch_size: typing.Union[int, None] = None, ) -> typing.List[typing.Union[ImportResponseWithId, ImportResponseFail[TDoc]]]: ... @@ -144,7 +258,7 @@ def import_( def import_( self, documents: typing.List[TDoc], - params: typing.Union[DocumentWriteParameters, None] = None, + import_parameters: typing.Union[DocumentWriteParameters, None] = None, batch_size: typing.Union[int, None] = None, ) -> typing.List[typing.Union[ImportResponseSuccess, ImportResponseFail[TDoc]]]: ... @@ -152,7 +266,7 @@ def import_( def import_( self, documents: typing.List[TDoc], - params: DocumentImportParametersReturnDoc, + import_parameters: DocumentImportParametersReturnDoc, batch_size: typing.Union[int, None] = None, ) -> typing.List[ typing.Union[ImportResponseWithDoc[TDoc], ImportResponseFail[TDoc]] @@ -162,10 +276,7 @@ def import_( def import_( self, documents: typing.List[TDoc], - params: typing.Union[ - DocumentImportParameters, - None, - ], + import_parameters: _ImportParameters, batch_size: typing.Union[int, None] = None, ) -> typing.List[ImportResponse[TDoc]]: ... @@ -173,26 +284,37 @@ def import_( def import_( self, documents: typing.Union[bytes, str], - params: typing.Union[ - DocumentImportParameters, - None, - ] = None, + import_parameters: _ImportParameters = None, batch_size: typing.Union[int, None] = None, ) -> str: ... 
- # Actual implementation that matches the overloads def import_( self, documents: typing.Union[bytes, str, typing.List[TDoc]], - params: typing.Union[ - DocumentImportParameters, - None, - ] = None, + import_parameters: _ImportParameters = None, batch_size: typing.Union[int, None] = None, - ) -> typing.Union[ - ImportResponse[TDoc], - str, - ]: + ) -> typing.Union[ImportResponse[TDoc], str]: + """ + Import documents into the collection. + + This method supports various input types and import parameters. + It can handle both individual documents and batches of documents. + + Args: + documents (Union[bytes, str, List[TDoc]]): The documents to import. + + import_parameters (Union[DocumentImportParameters, None], optional): + Parameters for the import operation. + + batch_size (Union[int, None], optional): The size of each batch for batch imports. + + Returns: + (ImportResponse[TDoc] | str): + The import response, which can be a list of responses or a string. + + Raises: + TypesenseClientError: If an empty list of documents is provided. + """ if not isinstance(documents, (str, bytes)): if batch_size: response_objs: ImportResponse[TDoc] = [] @@ -200,11 +322,14 @@ def import_( for document in documents: batch.append(document) if len(batch) == batch_size: - api_response = self.import_(documents=batch, params=params) + api_response = self.import_( + documents=batch, + import_parameters=import_parameters, + ) response_objs.extend(api_response) batch = [] if batch: - api_response = self.import_(batch, params) + api_response = self.import_(batch, import_parameters) response_objs.extend(api_response) else: @@ -214,14 +339,14 @@ def import_( if len(document_strs) == 0: raise TypesenseClientError( - f"Cannot import an empty list of documents." 
+ "Cannot import an empty list of documents.", ) docs_import = "\n".join(document_strs) res = self.api_call.post( self._endpoint_path("import"), body=docs_import, - params=params, + params=import_parameters, entity_type=str, as_json=False, ) @@ -237,10 +362,10 @@ def import_( ImportResponseSuccess, ImportResponseFail[TDoc], ] = json.loads(res_obj_str) - except json.JSONDecodeError as e: + except json.JSONDecodeError as decode_error: raise TypesenseClientError( - f"Invalid response - {res_obj_str}" - ) from e + f"Invalid response - {res_obj_str}", + ) from decode_error response_objs.append(res_obj_json) return response_objs @@ -248,21 +373,44 @@ def import_( api_response = self.api_call.post( self._endpoint_path("import"), body=documents, - params=params, + params=import_parameters, as_json=False, entity_type=str, ) return api_response def export( - self, params: typing.Union[DocumentExportParameters, None] = None + self, + export_parameters: typing.Union[DocumentExportParameters, None] = None, ) -> str: + """ + Export documents from the collection. + + Args: + export_parameters (Union[DocumentExportParameters, None], optional): + Parameters for the export operation. + + Returns: + str: The exported documents as a string. + """ api_response: str = self.api_call.get( - self._endpoint_path("export"), params=params, as_json=False, entity_type=str + self._endpoint_path("export"), + params=export_parameters, + as_json=False, + entity_type=str, ) return api_response def search(self, search_parameters: SearchParameters) -> SearchResponse[TDoc]: + """ + Search for documents in the collection. + + Args: + search_parameters (SearchParameters): The search parameters. + + Returns: + SearchResponse[TDoc]: The search response containing matching documents. 
+ """ stringified_search_params = stringify_search_params(search_parameters) response: SearchResponse[TDoc] = self.api_call.get( self._endpoint_path("search"), @@ -273,9 +421,44 @@ def search(self, search_parameters: SearchParameters) -> SearchResponse[TDoc]: return response def delete( - self, params: typing.Union[DeleteQueryParameters, None] = None + self, + delete_parameters: typing.Union[DeleteQueryParameters, None] = None, ) -> DeleteResponse: + """ + Delete documents from the collection based on given parameters. + + Args: + delete_parameters (Union[DeleteQueryParameters, None], optional): + Parameters for deletion. + + Returns: + DeleteResponse: The response containing information about the deletion. + """ response: DeleteResponse = self.api_call.delete( - self._endpoint_path(), params=params, entity_type=DeleteResponse + self._endpoint_path(), + params=delete_parameters, + entity_type=DeleteResponse, ) return response + + def _endpoint_path(self, action: typing.Union[str, None] = None) -> str: + """ + Construct the API endpoint path for document operations. + + Args: + action (Union[str, None], optional): The action to perform. Defaults to None. + + Returns: + str: The constructed endpoint path. 
+ """ + from typesense.collections import Collections + + action = action or "" + return "/".join( + [ + Collections.RESOURCE_PATH, + self.collection_name, + self.resource_path, + action, + ], + ) From 116d2e1e8d7e88d16e14823a21efc136ab379dc1 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 9 Sep 2024 22:00:55 +0300 Subject: [PATCH 179/288] refactor(documents): refactor import method for improved efficiency - Split import_ into smaller, focused methods - Optimize batch import logic - Update test cases to reflect new implementation --- src/typesense/documents.py | 145 +++++++++++++++++++------------------ tests/documents_test.py | 18 ++++- 2 files changed, 89 insertions(+), 74 deletions(-) diff --git a/src/typesense/documents.py b/src/typesense/documents.py index 9ff1683..8c27338 100644 --- a/src/typesense/documents.py +++ b/src/typesense/documents.py @@ -301,83 +301,23 @@ def import_( It can handle both individual documents and batches of documents. Args: - documents (Union[bytes, str, List[TDoc]]): The documents to import. - - import_parameters (Union[DocumentImportParameters, None], optional): - Parameters for the import operation. - - batch_size (Union[int, None], optional): The size of each batch for batch imports. + documents: The documents to import. + import_parameters: Parameters for the import operation. + batch_size: The size of each batch for batch imports. Returns: - (ImportResponse[TDoc] | str): - The import response, which can be a list of responses or a string. + The import response, which can be a list of responses or a string. Raises: TypesenseClientError: If an empty list of documents is provided. 
""" - if not isinstance(documents, (str, bytes)): - if batch_size: - response_objs: ImportResponse[TDoc] = [] - batch: typing.List[TDoc] = [] - for document in documents: - batch.append(document) - if len(batch) == batch_size: - api_response = self.import_( - documents=batch, - import_parameters=import_parameters, - ) - response_objs.extend(api_response) - batch = [] - if batch: - api_response = self.import_(batch, import_parameters) - response_objs.extend(api_response) - - else: - document_strs: typing.List[str] = [] - for document in documents: - document_strs.append(json.dumps(document)) - - if len(document_strs) == 0: - raise TypesenseClientError( - "Cannot import an empty list of documents.", - ) - - docs_import = "\n".join(document_strs) - res = self.api_call.post( - self._endpoint_path("import"), - body=docs_import, - params=import_parameters, - entity_type=str, - as_json=False, - ) - res_obj_strs = res.split("\n") - - response_objs = [] - for res_obj_str in res_obj_strs: - try: - res_obj_json: typing.Union[ - ImportResponseWithDocAndId[TDoc], - ImportResponseWithDoc[TDoc], - ImportResponseWithId, - ImportResponseSuccess, - ImportResponseFail[TDoc], - ] = json.loads(res_obj_str) - except json.JSONDecodeError as decode_error: - raise TypesenseClientError( - f"Invalid response - {res_obj_str}", - ) from decode_error - response_objs.append(res_obj_json) - - return response_objs - else: - api_response = self.api_call.post( - self._endpoint_path("import"), - body=documents, - params=import_parameters, - as_json=False, - entity_type=str, - ) - return api_response + if isinstance(documents, (str, bytes)): + return self._import_raw(documents, import_parameters) + + if batch_size: + return self._batch_import(documents, import_parameters, batch_size) + + return self._bulk_import(documents, import_parameters) def export( self, @@ -462,3 +402,66 @@ def _endpoint_path(self, action: typing.Union[str, None] = None) -> str: action, ], ) + + def _import_raw( + self, + 
documents: typing.Union[bytes, str], + import_parameters: _ImportParameters, + ) -> str: + """Import raw document data.""" + response: str = self.api_call.post( + self._endpoint_path("import"), + body=documents, + params=import_parameters, + as_json=False, + entity_type=str, + ) + + return response + + def _batch_import( + self, + documents: typing.List[TDoc], + import_parameters: _ImportParameters, + batch_size: int, + ) -> ImportResponse[TDoc]: + """Import documents in batches.""" + response_objs: ImportResponse[TDoc] = [] + for batch_index in range(0, len(documents), batch_size): + batch = documents[batch_index : batch_index + batch_size] + api_response = self._bulk_import(batch, import_parameters) + response_objs.extend(api_response) + return response_objs + + def _bulk_import( + self, + documents: typing.List[TDoc], + import_parameters: _ImportParameters, + ) -> ImportResponse[TDoc]: + """Import a list of documents in bulk.""" + document_strs = [json.dumps(doc) for doc in documents] + if not document_strs: + raise TypesenseClientError("Cannot import an empty list of documents.") + + docs_import = "\n".join(document_strs) + res = self.api_call.post( + self._endpoint_path("import"), + body=docs_import, + params=import_parameters, + entity_type=str, + as_json=False, + ) + return self._parse_import_response(res) + + def _parse_import_response(self, response: str) -> ImportResponse[TDoc]: + """Parse the import response string into a list of response objects.""" + response_objs: typing.List[ImportResponse] = [] + for res_obj_str in response.split("\n"): + try: + res_obj_json = json.loads(res_obj_str) + except json.JSONDecodeError as decode_error: + raise TypesenseClientError( + f"Invalid response - {res_obj_str}", + ) from decode_error + response_objs.append(res_obj_json) + return response_objs diff --git a/tests/documents_test.py b/tests/documents_test.py index 3796f82..8112be1 100644 --- a/tests/documents_test.py +++ b/tests/documents_test.py @@ -223,7 +223,7 @@ 
def test_import_fail( """Test that the Documents object doesn't throw an error when importing documents.""" wrong_company: Companies = {"company_name": "Wrong", "id": "0", "num_employees": 0} companies = generate_companies + [wrong_company] - request_spy = mocker.spy(actual_documents, "import_") + request_spy = mocker.spy(actual_documents, "_bulk_import") response = actual_documents.import_(companies) expected: typing.List[typing.Dict[str, typing.Union[str, bool, int]]] = [ @@ -280,12 +280,14 @@ def test_import_batch_size( ) -> None: """Test that the Documents object can import documents in batches.""" batch_size = 5 - document_spy = mocker.spy(actual_documents, "import_") + import_spy = mocker.spy(actual_documents, "import_") + batch_import_spy = mocker.spy(actual_documents, "_bulk_import") request_spy = mocker.spy(actual_api_call, "post") response = actual_documents.import_(generate_companies, batch_size=batch_size) expected = [{"success": True} for _ in generate_companies] - assert document_spy.call_count == len(generate_companies) // batch_size + 1 + assert import_spy.call_count == 1 + assert batch_import_spy.call_count == len(generate_companies) // batch_size assert request_spy.call_count == len(generate_companies) // batch_size assert response == expected @@ -293,38 +295,47 @@ def test_import_batch_size( def test_import_return_docs( generate_companies: typing.List[Companies], actual_documents: Documents[Companies], + mocker: MockFixture, delete_all: None, create_collection: None, ) -> None: """Test that the Documents object can return documents when importing.""" + request_spy = mocker.spy(actual_documents, "_bulk_import") response = actual_documents.import_(generate_companies, {"return_doc": True}) expected = [ {"success": True, "document": company} for company in generate_companies ] + + assert request_spy.call_count == 1 assert response == expected def test_import_return_ids( generate_companies: typing.List[Companies], actual_documents: 
Documents[Companies], + mocker: MockFixture, delete_all: None, create_collection: None, ) -> None: """Test that the Documents object can return document IDs when importing.""" + request_spy = mocker.spy(actual_documents, "_bulk_import") response = actual_documents.import_(generate_companies, {"return_id": True}) expected = [ {"success": True, "id": company.get("id")} for company in generate_companies ] + assert request_spy.call_count == 1 assert response == expected def test_import_return_ids_and_docs( generate_companies: typing.List[Companies], actual_documents: Documents[Companies], + mocker: MockFixture, delete_all: None, create_collection: None, ) -> None: """Test that the Documents object can return document IDs and documents when importing.""" + request_spy = mocker.spy(actual_documents, "_bulk_import") response = actual_documents.import_( generate_companies, {"return_id": True, "return_doc": True}, @@ -333,6 +344,7 @@ def test_import_return_ids_and_docs( {"success": True, "document": company, "id": company.get("id")} for company in generate_companies ] + assert request_spy.call_count == 1 assert response == expected From 4c79948adbd01bf08775c4e297ddb92944313322 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 9 Sep 2024 22:03:45 +0300 Subject: [PATCH 180/288] refactor: remove unused validation module --- src/typesense/validation.py | 18 ------------------ 1 file changed, 18 deletions(-) delete mode 100644 src/typesense/validation.py diff --git a/src/typesense/validation.py b/src/typesense/validation.py deleted file mode 100644 index 8dc8310..0000000 --- a/src/typesense/validation.py +++ /dev/null @@ -1,18 +0,0 @@ -from __future__ import annotations - -import sys - -if sys.version_info > (3, 11): - import typing -else: - import typing_extensions as typing - -from typesense.exceptions import InvalidParameter - - -def validate_search(params: typing.Mapping[str, str]) -> None: - for key in params: - if not isinstance(params[key], str): - raise 
InvalidParameter( - f"'{key}' field expected a string but was given {type(params[key]).__name__}" - ) From a15fa56cbab92da87f0ee711a6847248b8468b62 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 10 Sep 2024 13:46:34 +0300 Subject: [PATCH 181/288] feat: add request handler module for http request management - Authentication via API key - Support for JSON and non-JSON responses - Custom error handling for various HTTP status codes - Parameter normalization The module is compatible with Python 3.11+ and earlier versions. --- src/typesense/request_handler.py | 274 +++++++++++++++++++++++++++++++ tests/import_test.py | 1 + 2 files changed, 275 insertions(+) create mode 100644 src/typesense/request_handler.py diff --git a/src/typesense/request_handler.py b/src/typesense/request_handler.py new file mode 100644 index 0000000..3ef16ca --- /dev/null +++ b/src/typesense/request_handler.py @@ -0,0 +1,274 @@ +""" +This module provides functionality for handling HTTP requests in the Typesense client library. + +Classes: + - RequestHandler: Manages HTTP requests to the Typesense API. + - SessionFunctionKwargs: Type for keyword arguments in session functions. + +The RequestHandler class interacts with the Typesense API to manage HTTP requests, +handle authentication, and process responses. It provides methods to send requests, +normalize parameters, and handle errors. + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. + +Key Features: +- Handles authentication via API key +- Supports JSON and non-JSON responses +- Provides custom error handling for various HTTP status codes +- Normalizes boolean parameters for API requests + +Note: This module relies on the 'requests' library for making HTTP requests. 
+""" + +import json +import sys +from types import MappingProxyType + +import requests + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from typesense.configuration import Configuration +from typesense.exceptions import ( + HTTPStatus0Error, + ObjectAlreadyExists, + ObjectNotFound, + ObjectUnprocessable, + RequestForbidden, + RequestMalformed, + RequestUnauthorized, + ServerError, + ServiceUnavailable, + TypesenseClientError, +) + +TEntityDict = typing.TypeVar("TEntityDict") +TParams = typing.TypeVar("TParams") +TBody = typing.TypeVar("TBody") + +_ERROR_CODE_MAP: typing.Mapping[str, typing.Type[TypesenseClientError]] = ( + MappingProxyType( + { + "0": HTTPStatus0Error, + "400": RequestMalformed, + "401": RequestUnauthorized, + "403": RequestForbidden, + "404": ObjectNotFound, + "409": ObjectAlreadyExists, + "422": ObjectUnprocessable, + "500": ServerError, + "503": ServiceUnavailable, + }, + ) +) + + +class SessionFunctionKwargs(typing.Generic[TParams, TBody], typing.TypedDict): + """ + Type definition for keyword arguments used in session functions. + + Attributes: + params (Optional[Union[TParams, None]]): Query parameters for the request. + + data (Optional[Union[TBody, str, None]]): Body of the request. + + headers (Optional[Dict[str, str]]): Headers for the request. + + timeout (float): Timeout for the request in seconds. + + verify (bool): Whether to verify SSL certificates. + """ + + params: typing.NotRequired[typing.Union[TParams, None]] + data: typing.NotRequired[typing.Union[TBody, str, None]] + headers: typing.NotRequired[typing.Dict[str, str]] + timeout: float + verify: bool + + +class RequestHandler: + """ + Handles HTTP requests to the Typesense API. + + This class manages authentication, request sending, and response processing + for interactions with the Typesense API. + + Attributes: + api_key_header_name (str): The header name for the API key. 
+ config (Configuration): The configuration object for the Typesense client. + """ + + api_key_header_name: typing.Final[str] = "X-TYPESENSE-API-KEY" + + def __init__(self, config: Configuration): + """ + Initialize the RequestHandler with a configuration. + + Args: + config (Configuration): The configuration object for the Typesense client. + """ + self.config = config + + @typing.overload + def make_request( + self, + fn: typing.Callable[..., requests.models.Response], + url: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Literal[False], + **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> str: + """ + Make an HTTP request to the Typesense API and return the response as a string. + + This overload is used when as_json is set to False, indicating that the response + should be returned as a raw string instead of being parsed as JSON. + + Args: + fn (Callable): The HTTP method function to use (e.g., requests.get). + + url (str): The URL to send the request to. + + entity_type (Type[TEntityDict]): The expected type of the response entity. + + as_json (Literal[False]): Specifies that the response should not be parsed as JSON. + + kwargs: Additional keyword arguments for the request. + + Returns: + str: The raw string response from the API. + + Raises: + TypesenseClientError: If the API returns an error response. + """ + + @typing.overload + def make_request( + self, + fn: typing.Callable[..., requests.models.Response], + url: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Literal[True], + **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> TEntityDict: + """ + Make an HTTP request to the Typesense API. + + Args: + fn (Callable): The HTTP method function to use (e.g., requests.get). + + url (str): The URL to send the request to. + + entity_type (Type[TEntityDict]): The expected type of the response entity. + + as_json (bool): Whether to return the response as JSON. Defaults to True. 
+ + kwargs: Additional keyword arguments for the request. + + Returns: + TEntityDict: The response, as a JSON object. + + Raises: + TypesenseClientError: If the API returns an error response. + """ + + def make_request( + self, + fn: typing.Callable[..., requests.models.Response], + url: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Union[typing.Literal[True], typing.Literal[False]] = True, + **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> typing.Union[TEntityDict, str]: + """ + Make an HTTP request to the Typesense API. + + Args: + fn (Callable): The HTTP method function to use (e.g., requests.get). + + url (str): The URL to send the request to. + + entity_type (Type[TEntityDict]): The expected type of the response entity. + + as_json (bool): Whether to return the response as JSON. Defaults to True. + + kwargs: Additional keyword arguments for the request. + + Returns: + Union[TEntityDict, str]: The response, either as a JSON object or a string. + + Raises: + TypesenseClientError: If the API returns an error response. + """ + headers = {self.api_key_header_name: self.config.api_key} + kwargs.setdefault("headers", {}).update(headers) + kwargs.setdefault("timeout", self.config.connection_timeout_seconds) + kwargs.setdefault("verify", self.config.verify) + if kwargs.get("data") and not isinstance(kwargs["data"], (str, bytes)): + kwargs["data"] = json.dumps(kwargs["data"]) + + response = fn(url, **kwargs) + + if response.status_code < 200 or response.status_code >= 300: + error_message = self._get_error_message(response) + raise self._get_exception(response.status_code)( + response.status_code, + error_message, + ) + + if as_json: + res: TEntityDict = response.json() + return res + + return response.text + + @staticmethod + def normalize_params(params: TParams) -> None: + """ + Normalize boolean parameters in the request. + + Args: + params (TParams): The parameters to normalize. 
+ + Raises: + ValueError: If params is not a dictionary. + """ + if not isinstance(params, typing.Dict): + raise ValueError("Params must be a dictionary.") + for key, parameter_value in params.items(): + if isinstance(parameter_value, bool): + params[key] = str(parameter_value).lower() + + @staticmethod + def _get_error_message(response: requests.Response) -> str: + """ + Extract the error message from an API response. + + Args: + response (requests.Response): The API response. + + Returns: + str: The extracted error message or a default message. + """ + content_type = response.headers.get("Content-Type", "") + if content_type.startswith("application/json"): + err_message: str = response.json().get("message", "API error.") + return err_message + return "API error." + + @staticmethod + def _get_exception(http_code: int) -> typing.Type[TypesenseClientError]: + """ + Map an HTTP status code to the appropriate exception type. + + Args: + http_code (int): The HTTP status code. + + Returns: + Type[TypesenseClientError]: The exception type corresponding to the status code. 
+ """ + return _ERROR_CODE_MAP.get(str(http_code), TypesenseClientError) diff --git a/tests/import_test.py b/tests/import_test.py index 72992d5..616ec11 100644 --- a/tests/import_test.py +++ b/tests/import_test.py @@ -32,6 +32,7 @@ "collection", "collections", "configuration", + "request_handler", "conversations_models", "document", "documents", From 5891aded9835711ff58f08e8332affea5e6522f8 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 10 Sep 2024 13:48:48 +0300 Subject: [PATCH 182/288] feat: add node manager for cluster configuration handling - Introduce `NodeManager` class to manage Typesense cluster nodes - Implement round-robin node selection strategy - Add nearest node prioritization feature - Include node health tracking and periodic health checks - Enhance fault tolerance and load balancing across the cluster --- src/typesense/node_manager.py | 128 ++++++++++++++++++++++++++++++++++ 1 file changed, 128 insertions(+) create mode 100644 src/typesense/node_manager.py diff --git a/src/typesense/node_manager.py b/src/typesense/node_manager.py new file mode 100644 index 0000000..e671c8d --- /dev/null +++ b/src/typesense/node_manager.py @@ -0,0 +1,128 @@ +""" +This module provides functionality for managing nodes in a Typesense cluster configuration. + +It contains the NodeManager class, which is responsible for node selection, health checks, +and rotation strategies for load balancing and fault tolerance in a Typesense cluster. + +Key features: +- Round-robin node selection +- Nearest node prioritization (if configured) +- Node health tracking and updates +- Periodic health checks based on a configurable interval + +Classes: + NodeManager: Manages the nodes in a Typesense cluster configuration. 
+ +Dependencies: + - typesense.configuration: Provides Configuration and Node classes + - typesense.logger: Provides logging functionality + +Usage: + from typesense.configuration import Configuration + from node_manager import NodeManager + + config = Configuration(...) + node_manager = NodeManager(config) + node = node_manager.get_node() + +Note: This module is part of the Typesense Python client library and is +used internally by other components of the library. +""" + +import copy +import time + +from typesense.configuration import Configuration, Node +from typesense.logger import logger + + +class NodeManager: + """ + Manages the nodes in a Typesense cluster configuration. + + This class handles node selection, health checks, and rotation for load balancing + and fault tolerance in a Typesense cluster. + + Attributes: + config (Configuration): The configuration object for the Typesense client. + nodes (List[Node]): A copy of the nodes from the configuration. + node_index (int): The index of the current node in the rotation. + """ + + def __init__(self, config: Configuration): + """ + Initialize the NodeManager with a given configuration. + + Args: + config (Configuration): The configuration object for the Typesense client. + """ + self.config = config + self.nodes = copy.deepcopy(config.nodes) + self.node_index = 0 + self._initialize_nodes() + + def get_node(self) -> Node: + """ + Get the next available healthy node. + + This method implements a round-robin selection strategy, prioritizing the nearest node + if configured, and considering the health status of each node. + + Returns: + Node: The selected node for the next operation. 
+ """ + if self.config.nearest_node: + if self.config.nearest_node.healthy or self._is_due_for_health_check( + self.config.nearest_node, + ): + return self.config.nearest_node + + node_index = 0 + while node_index < len(self.nodes): + node_index += 1 + node = self.nodes[self.node_index] + self.node_index = (self.node_index + 1) % len(self.nodes) + if node.healthy or self._is_due_for_health_check(node): + return node + + logger.debug("No healthy nodes were found. Returning the next node.") + return self.nodes[self.node_index] + + def set_node_health(self, node: Node, is_healthy: bool) -> None: + """ + Set the health status of a node and update its last access timestamp. + + Args: + node (Node): The node to update. + is_healthy (bool): The health status to set for the node. + """ + node.healthy = is_healthy + node.last_access_ts = int(time.time()) + + def _is_due_for_health_check(self, node: Node) -> bool: + """ + Check if a node is due for a health check based on the configured interval. + + Args: + node (Node): The node to check. + + Returns: + bool: True if the node is due for a health check, False otherwise. + """ + current_epoch_ts = int(time.time()) + return bool( + (current_epoch_ts - node.last_access_ts) + > self.config.healthcheck_interval_seconds, + ) + + def _initialize_nodes(self) -> None: + """ + Initialize all nodes as healthy. + + This method sets the initial health status of all nodes, including the nearest node + if configured, to healthy. 
+ """ + if self.config.nearest_node: + self.set_node_health(self.config.nearest_node, is_healthy=True) + for node in self.nodes: + self.set_node_health(node, is_healthy=True) From 15cdb4eac2a3e93c05337c27f5f7dbc320d01523 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 10 Sep 2024 13:50:11 +0300 Subject: [PATCH 183/288] refactor(api-call): simplify http request logic - Extract node management logic into `NodeManager` class - Update ApiCall to use NodeManager for node operations - Utilize `RequestHandler` for request execution - Adjust tests to reflect new structure and dependencies - Improve error handling and retry logic in ApiCall --- src/typesense/api_call.py | 986 +++++++++-------------------- tests/alias_test.py | 5 +- tests/aliases_test.py | 9 +- tests/analytics_rule_test.py | 5 +- tests/analytics_rules_test.py | 9 +- tests/analytics_test.py | 5 +- tests/api_call_test.py | 90 ++- tests/client_test.py | 8 +- tests/collection_test.py | 5 +- tests/collections_test.py | 9 +- tests/conversation_model_test.py | 5 +- tests/conversations_models_test.py | 9 +- tests/debug_test.py | 5 +- tests/document_test.py | 5 +- tests/documents_test.py | 9 +- tests/key_test.py | 5 +- tests/keys_test.py | 9 +- tests/multi_search_test.py | 5 +- tests/operations_test.py | 5 +- tests/override_test.py | 5 +- tests/overrides_test.py | 9 +- tests/stopwords_set_test.py | 5 +- tests/stopwords_test.py | 9 +- tests/synonym_test.py | 5 +- tests/synonyms_test.py | 9 +- 25 files changed, 483 insertions(+), 747 deletions(-) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index 9854406..47d0eec 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -1,66 +1,55 @@ """ -This module is responsible for making HTTP requests to the Typesense API. +This module provides functionality for making API calls to a Typesense server. 
+ +It contains the ApiCall class, which is responsible for executing HTTP requests +to the Typesense API, handling retries, and managing node health. + +Key features: +- Support for GET, POST, PUT, PATCH, and DELETE HTTP methods +- Automatic retries on server errors +- Node health management +- Type-safe request execution with overloaded methods Classes: - - ApiCall: A class that makes HTTP requests to the Typesense API. - -Functions: - - get_exception: Get the exception class for a given HTTP status code. - - normalize_params: Normalize boolean values in the request parameters to strings. - - make_request: Make the actual HTTP request, along with retries. - - node_due_for_health_check: Check if a node is due for a health check. - - set_node_healthcheck: Set the health status of a node and update the - last access timestamp. - - get_node: Get a healthy host from the pool in a round-robin fashion. - - initialize_nodes: Initialize the nodes in the pool. - - get: Make a GET request to the endpoint with the given parameters. - - post: Make a POST request to the endpoint with the given parameters. - - put: Make a PUT request to the endpoint with the given parameters. - - patch: Make a PATCH request to the endpoint with the given parameters. - - delete: Make a DELETE request to the endpoint with the given parameters. - -Exceptions: - - HTTPStatus0Error: An exception raised when the status code is 0. - - RequestMalformed: An exception raised when the status code is 400. - - RequestUnauthorized: An exception raised when the status code is 401. - - RequestForbidden: An exception raised when the status code is 403. - - ObjectNotFound: An exception raised when the status code is 404. - - ObjectAlreadyExists: An exception raised when the status code is 409. - - ObjectUnprocessable: An exception raised when the status code is 422. - - ServerError: An exception raised when the status code is 500. - - ServiceUnavailable: An exception raised when the status code is 503. 
- - TypesenseClientError: An exception raised when the status code is not one of the above. -""" + ApiCall: Manages API calls to the Typesense server. + +Dependencies: + - requests: For making HTTP requests + - typesense.configuration: Provides Configuration and Node classes + - typesense.exceptions: Custom exception classes + - typesense.node_manager: Provides NodeManager class + - typesense.request_handler: Provides RequestHandler class + +Usage: + from typesense.configuration import Configuration + from api_call import ApiCall -from __future__ import annotations + config = Configuration(...) + api_call = ApiCall(config) + response = api_call.get("/collections", SomeEntityType) + +Note: This module is part of the Typesense Python client library and is used internally +by other components of the library. +""" -import copy -import json import sys -import time import requests from typesense.configuration import Configuration, Node - -if sys.version_info >= (3, 11): - import typing -else: - import typing_extensions as typing - from typesense.exceptions import ( HTTPStatus0Error, - ObjectAlreadyExists, - ObjectNotFound, - ObjectUnprocessable, - RequestForbidden, - RequestMalformed, - RequestUnauthorized, ServerError, ServiceUnavailable, TypesenseClientError, ) -from typesense.logger import logger +from typesense.node_manager import NodeManager +from typesense.request_handler import RequestHandler, SessionFunctionKwargs + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing session = requests.sessions.Session() TParams = typing.TypeVar("TParams") @@ -68,805 +57,448 @@ TEntityDict = typing.TypeVar("TEntityDict") -class SessionFunctionKwargs(typing.Generic[TParams, TBody], typing.TypedDict): - """ - Dictionary of keyword arguments for the session function. - - Attributes: - params (TParams | None): The request parameters. - data (TBody | str): The request body. - timeout (float): The timeout for the request. 
- verify (bool): Whether to verify - """ - - params: typing.NotRequired[typing.Union[TParams, None]] - data: typing.NotRequired[typing.Union[TBody, str, None]] - timeout: float - verify: bool +_SERVER_ERRORS: typing.Final[ + typing.Tuple[ + typing.Type[requests.exceptions.Timeout], + typing.Type[requests.exceptions.ConnectionError], + typing.Type[requests.exceptions.HTTPError], + typing.Type[requests.exceptions.RequestException], + typing.Type[requests.exceptions.SSLError], + typing.Type[HTTPStatus0Error], + typing.Type[ServerError], + typing.Type[ServiceUnavailable], + ] +] = ( + requests.exceptions.Timeout, + requests.exceptions.ConnectionError, + requests.exceptions.HTTPError, + requests.exceptions.RequestException, + requests.exceptions.SSLError, + HTTPStatus0Error, + ServerError, + ServiceUnavailable, +) class ApiCall: - """Handles API calls to Typesense with retry and node selection logic. + """ + Manages API calls to the Typesense server. - This class manages API requests to Typesense, including node selection, - health checks, retries, and error handling. It supports various HTTP methods - and handles authentication and request formatting. + This class handles the execution of HTTP requests to the Typesense API, + including retries, node health management, and error handling. Attributes: - API_KEY_HEADER_NAME (str): The header name for the API key. - config (Configuration): The configuration for the API client. - nodes (List[Node]): A copy of the nodes from the configuration. - node_index (int): The current index for round-robin node selection. - - Methods: - get_node: Selects a healthy node for the next API call. - make_request: Executes an API request with retries and error handling. - get: Performs a GET request. - post: Performs a POST request. - put: Performs a PUT request. - patch: Performs a PATCH request. - delete: Performs a DELETE request. + config (Configuration): The configuration object for the Typesense client. 
+ node_manager (NodeManager): Manages the nodes in the Typesense cluster. + request_handler (RequestHandler): Handles the execution of individual requests. """ - API_KEY_HEADER_NAME = "X-TYPESENSE-API-KEY" - def __init__(self, config: Configuration): - """Initializes the ApiCall instance with the given configuration. - - Args: - config (Configuration): The configuration for the API client. - """ - self.config = config - self.nodes = copy.deepcopy(self.config.nodes) - self.node_index = 0 - self._initialize_nodes() - - def node_due_for_health_check(self, node: Node) -> bool: - current_epoch_ts = int(time.time()) - due_for_check: bool = ( - current_epoch_ts - node.last_access_ts - ) > self.config.healthcheck_interval_seconds - if due_for_check: - logger.debug( - f"Node {node.host}:{node.port} is due for health check.", - ) - return due_for_check - - # Returns a healthy host from the pool in a round-robin fashion. - # Might return an unhealthy host periodically to check for recovery. - def get_node(self) -> Node: - """ - Return a healthy host from the pool in a round-robin fashion. - - Might return an unhealthy host periodically to check for recovery. - - Returns: - Node: The healthy host from the pool in a round-robin fashion. - """ - if self.config.nearest_node: - if self.config.nearest_node.healthy or self.node_due_for_health_check( - self.config.nearest_node, - ): - logger.debug("Using nearest node.") - return self.config.nearest_node - - logger.debug( - " ".join( - [ - "Nearest node is unhealthy or not due for health check.", - "Falling back to individual nodes.", - ], - ), - ) - - node_index = 0 - while node_index < len(self.nodes): - node_index += 1 - node = self.nodes[self.node_index] - self.node_index = (self.node_index + 1) % len(self.nodes) - - if node.healthy or self.node_due_for_health_check(node): - return node - - # None of the nodes are marked healthy, - # but some of them could have become healthy since last health check. 
- # So we will just return the next node. - logger.debug("No healthy nodes were found. Returning the next node.") - return self.nodes[self.node_index] - - @staticmethod - def get_exception(http_code: int) -> typing.Type[TypesenseClientError]: - """ - Return the exception class for a given HTTP status code. - - Args: - http_code (int): The HTTP status code. - - Returns: - Type[TypesenseClientError]: The exception class for the given HTTP status code. - """ - if http_code == 0: - return HTTPStatus0Error - elif http_code == 400: - return RequestMalformed - elif http_code == 401: - return RequestUnauthorized - elif http_code == 403: - return RequestForbidden - elif http_code == 404: - return ObjectNotFound - elif http_code == 409: - return ObjectAlreadyExists - elif http_code == 422: - return ObjectUnprocessable - elif http_code == 500: - return ServerError - elif http_code == 503: - return ServiceUnavailable - else: - return TypesenseClientError - - @typing.overload - def make_request( - self, - fn: typing.Callable[..., requests.models.Response], - endpoint: str, - entity_type: type[TEntityDict], - as_json: typing.Literal[True], - **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], - ) -> TEntityDict: - """ - Use a session function to make a request to the endpoint with the given kwargs. - - Args: - fn (Callable[..., requests.models.Response]): The session function to use. - endpoint (str): The endpoint to make the request to. - as_json (bool): Whether to return the response as a JSON object. - kwargs (SessionFunctionKwargs): The keyword arguments for the session function. - - Returns: - TEntityDict: The response from the request in json format. - - :raises: - HTTPStatus0Error: If the status code is 0. - - RequestMalformed: If the status code is 400. - - RequestUnauthorized: If the status code is 401. - - RequestForbidden: If the status code is 403. - - ObjectNotFound: If the status code is 404. - - ObjectAlreadyExists: If the status code is 409. 
- - ObjectUnprocessable: If the status code is 422. - - ServerError: If the status code is 500. - - ServiceUnavailable: If the status code is 503. - - TypesenseClientError: If the status code is not one of the above. """ - - @typing.overload - def make_request( - self, - fn: typing.Callable[..., requests.models.Response], - endpoint: str, - entity_type: type[TEntityDict], - as_json: typing.Literal[False], - **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], - ) -> str: - """ - Use a session function to make a request to the endpoint with the given kwargs. - - Args: - fn (Callable[..., requests.models.Response]): The session function to use. - endpoint (str): The endpoint to make the request to. - as_json (bool): Whether to return the response as a JSON object. - kwargs (SessionFunctionKwargs): The keyword arguments for the session function. - - Returns: - str: The response from the request in text format. - - :raises: - HTTPStatus0Error: If the status code is 0. - - RequestMalformed: If the status code is 400. - - RequestUnauthorized: If the status code is 401. - - RequestForbidden: If the status code is 403. - - ObjectNotFound: If the status code is 404. - - ObjectAlreadyExists: If the status code is 409. - - ObjectUnprocessable: If the status code is 422. - - ServerError: If the status code is 500. - - ServiceUnavailable: If the status code is 503. - - TypesenseClientError: If the status code is not one of the above. - """ - - # Makes the actual http request, along with retries - def make_request( - self, - fn: typing.Callable[..., requests.models.Response], - endpoint: str, - entity_type: type[TEntityDict], - as_json: typing.Union[typing.Literal[False], typing.Literal[True]], - **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], - ) -> typing.Union[TEntityDict, str]: - """ - Use a session function to make a request to the endpoint with the given kwargs. 
- - Args: - fn (Callable[..., requests.models.Response]): The session function to use. - endpoint (str): The endpoint to make the request to. - as_json (bool): Whether to return the response as a JSON object. - kwargs (SessionFunctionKwargs): The keyword arguments for the session function. - - Returns: - Union[TEntityDict, str]: The response from the request. - - :raises: - HTTPStatus0Error: If the status code is 0. - - RequestMalformed: If the status code is 400. - - RequestUnauthorized: If the status code is 401. - - RequestForbidden: If the status code is 403. - - ObjectNotFound: If the status code is 404. - - ObjectAlreadyExists: If the status code is 409. - - ObjectUnprocessable: If the status code is 422. - - ServerError: If the status code is 500. - - ServiceUnavailable: If the status code is 503. - - TypesenseClientError: If the status code is not one of the above. - """ - num_tries = 0 - last_exception: BaseException - - logger.debug(f"Making {fn.__name__} {endpoint}") - - while num_tries < (self.config.num_retries + 1): - num_tries += 1 - node = self.get_node() - - logger.debug( - f"Try {num_tries} to node {node.host}:{node.port} -- healthy? 
{node.healthy}", - ) - - try: - url = node.url() + endpoint - if kwargs.get("data") and not isinstance(kwargs["data"], (str, bytes)): - kwargs["data"] = json.dumps(kwargs["data"]) - - response = fn( - url, - headers={ApiCall.API_KEY_HEADER_NAME: self.config.api_key}, - **kwargs, - ) - - # Treat any status code > 0 and < 500 to be an indication that node is healthy - # We exclude 0 since some clients return 0 when request fails - if 0 < response.status_code < 500: - logger.debug( - "".join( - [ - f"{node.host}:{node.port} is healthy.", - f"Status code: {response.status_code}", - ], - ), - ) - self.set_node_healthcheck(node, is_healthy=True) - - # We should raise a custom exception if status code is not 20X - if response.status_code < 200 or response.status_code >= 300: - content_type = response.headers.get("Content-Type", "") - error_message = ( - response.json().get("message", "API error.") - if content_type.startswith("application/json") - else "API error." - ) - # Raised exception will be caught and retried - raise ApiCall.get_exception(response.status_code)( - response.status_code, - error_message, - ) - - if as_json: - # Have to use type hinting to avoid returning any - resposne_json: TEntityDict = response.json() - return resposne_json # noqa: WPS331 - return response.text - except ( - requests.exceptions.Timeout, - requests.exceptions.ConnectionError, - requests.exceptions.HTTPError, - requests.exceptions.RequestException, - requests.exceptions.SSLError, - HTTPStatus0Error, - ServerError, - ServiceUnavailable, - ) as e: - # Catch the exception and retry - self.set_node_healthcheck(node, is_healthy=False) - logger.debug( - " ".join( - [ - f"Request to {node.host}:{node.port} failed", - "because of {connection_error}", - ], - ), - ) - logger.debug( - f"Sleeping for {self.config.retry_interval_seconds} and retrying...", - ) - last_exception = e - time.sleep(self.config.retry_interval_seconds) - - logger.debug(f"No retries left. 
Raising last exception: {last_exception}") - raise last_exception - - def set_node_healthcheck(self, node: Node, is_healthy: bool) -> None: - """ - Set the health status of the node and updates the last access timestamp. + Initialize the ApiCall instance. Args: - node (Node): The node to set the health status of. - is_healthy (bool): Whether the node is healthy + config (Configuration): The configuration object for the Typesense client. """ - node.healthy = is_healthy - node.last_access_ts = int(time.time()) - - @staticmethod - def normalize_params(params: TParams) -> None: - """ - Normalize boolean values in the request parameters to strings. - - Args: - params (TParams): The request parameters. - """ - if not isinstance(params, typing.Dict): - raise ValueError("Params must be a dictionary.") - for key in params.keys(): - if isinstance(params[key], bool) and params[key]: - params[key] = "true" - elif isinstance(params[key], bool) and not params[key]: - params[key] = "false" + self.config = config + self.node_manager = NodeManager(config) + self.request_handler = RequestHandler(config) @typing.overload def get( self, endpoint: str, - entity_type: type[TEntityDict], + entity_type: typing.Type[TEntityDict], as_json: typing.Literal[False], params: typing.Union[TParams, None] = None, ) -> str: """ - Make a GET request to the endpoint with the given parameters. + Execute a GET request to the Typesense API. Args: - endpoint (str): The endpoint to make the request to. - as_json = True: Whether to return the response as a JSON object. - params (TParams | None): The request parameters. + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + as_json (False): Whether to return the response as JSON. Defaults to True. + params (Union[TParams, None], optional): Query parameters for the request. 
Returns: - TEntityDict: The response from the request in json format - - :raises: - HTTPStatus0Error: If the status code is 0. - - RequestMalformed: If the status code is 400. - - RequestUnauthorized: If the status code is 401. - - RequestForbidden: If the status code is 403. - - ObjectNotFound: If the status code is 404. - - ObjectAlreadyExists: If the status code is 409. - - ObjectUnprocessable: If the status code is 422. - - ServerError: If the status code is 500. - - ServiceUnavailable: If the status code is 503. - - TypesenseClientError: If the status code is not one of the above. + str: The response, as a string. """ @typing.overload def get( self, endpoint: str, - entity_type: type[TEntityDict], + entity_type: typing.Type[TEntityDict], as_json: typing.Literal[True], params: typing.Union[TParams, None] = None, ) -> TEntityDict: """ - Make a GET request to the endpoint with the given parameters. + Execute a GET request to the Typesense API. Args: - endpoint (str): The endpoint to make the request to. - as_json = False: Whether to return the response as a JSON object. - params (TParams | None): The request parameters. + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + as_json (True): Whether to return the response as JSON. Defaults to True. + params (Union[TParams, None], optional): Query parameters for the request. Returns: - str: The response from the request in text format - - :raises: - HTTPStatus0Error: If the status code is 0. - - RequestMalformed: If the status code is 400. - - RequestUnauthorized: If the status code is 401. - - RequestForbidden: If the status code is 403. - - ObjectNotFound: If the status code is 404. - - ObjectAlreadyExists: If the status code is 409. - - ObjectUnprocessable: If the status code is 422. - - ServerError: If the status code is 500. - - ServiceUnavailable: If the status code is 503. - - TypesenseClientError: If the status code is not one of the above. 
+ EntityDict: The response, as a JSON object. """ def get( self, endpoint: str, - entity_type: type[TEntityDict], + entity_type: typing.Type[TEntityDict], as_json: typing.Union[typing.Literal[True], typing.Literal[False]] = True, params: typing.Union[TParams, None] = None, ) -> typing.Union[TEntityDict, str]: """ - Make a GET request to the endpoint with the given parameters. + Execute a GET request to the Typesense API. Args: - endpoint (str): The endpoint to make the request to. - as_json (bool): Whether to return the response as a JSON object. - params (TParams | None): The request parameters. + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + as_json (bool): Whether to return the response as JSON. Defaults to True. + params (Union[TParams, None], optional): Query parameters for the request. Returns: - Union[TEntityDict, str]: The response from the request - - :raises: - HTTPStatus0Error: If the status code is 0. - - RequestMalformed: If the status code is 400. - - RequestUnauthorized: If the status code is 401. - - RequestForbidden: If the status code is 403. - - ObjectNotFound: If the status code is 404. - - ObjectAlreadyExists: If the status code is 409. - - ObjectUnprocessable: If the status code is 422. - - ServerError: If the status code is 500. - - ServiceUnavailable: If the status code is 503. - - TypesenseClientError: If the status code is not one of the above. + Union[TEntityDict, str]: The response, either as a JSON object or a string. 
""" - return self.make_request( + return self._execute_request( session.get, endpoint, entity_type, - as_json=as_json, + as_json, params=params, - timeout=self.config.connection_timeout_seconds, - verify=self.config.verify, ) @typing.overload def post( self, endpoint: str, - entity_type: type[TEntityDict], + entity_type: typing.Type[TEntityDict], as_json: typing.Literal[False], params: typing.Union[TParams, None] = None, body: typing.Union[TBody, None] = None, ) -> str: """ - Make a POST request to the endpoint with the given parameters. + Execute a GET request to the Typesense API. Args: - endpoint (str): The endpoint to make the request to. - body (TBody): The request body. - as_json = False: Whether to return the response as a JSON object. - params (TParams | None): The request parameters. + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + as_json (False): Whether to return the response as JSON. Defaults to True. + params (Union[TParams, None], optional): Query parameters for the request. Returns: - str: The response from the request in text format - - :raises: - HTTPStatus0Error: If the status code is 0. - - RequestMalformed: If the status code is 400. - - RequestUnauthorized: If the status code is 401. - - RequestForbidden: If the status code is 403. - - ObjectNotFound: If the status code is 404. - - ObjectAlreadyExists: If the status code is 409. - - ObjectUnprocessable: If the status code is 422. - - ServerError: If the status code is 500. - - ServiceUnavailable: If the status code is 503. - - TypesenseClientError: If the status code is not one of the above. + str: The response, as a string. 
""" @typing.overload def post( self, endpoint: str, - entity_type: type[TEntityDict], + entity_type: typing.Type[TEntityDict], as_json: typing.Literal[True], params: typing.Union[TParams, None] = None, body: typing.Union[TBody, None] = None, ) -> TEntityDict: """ - Make a POST request to the endpoint with the given parameters. + Execute a POST request to the Typesense API. Args: - endpoint (str): The endpoint to make the request to. - body (TBody): The request body. - as_json = True: Whether to return the response as a JSON object. - params (TParams | None): The request parameters. + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + as_json (True): Whether to return the response as JSON. Defaults to True. + params (Union[TParams, None], optional): Query parameters for the request. Returns: - TEntityDict: The response from the request in json format - - :raises: - HTTPStatus0Error: If the status code is 0. - - RequestMalformed: If the status code is 400. - - RequestUnauthorized: If the status code is 401. - - RequestForbidden: If the status code is 403. - - ObjectNotFound: If the status code is 404. - - ObjectAlreadyExists: If the status code is 409. - - ObjectUnprocessable: If the status code is 422. - - ServerError: If the status code is 500. - - ServiceUnavailable: If the status code is 503. - - TypesenseClientError: If the status code is not one of the above. + EntityDict: The response, as a JSON object. """ def post( self, endpoint: str, - entity_type: type[TEntityDict], + entity_type: typing.Type[TEntityDict], as_json: typing.Union[typing.Literal[True], typing.Literal[False]] = True, params: typing.Union[TParams, None] = None, body: typing.Union[TBody, None] = None, ) -> typing.Union[str, TEntityDict]: """ - Make a POST request to the endpoint with the given parameters. + Execute a POST request to the Typesense API. Args: - endpoint (str): The endpoint to make the request to. 
- body (TBody): The request body. - as_json = bool: Whether to return the response as a JSON object. - params (TParams | None): The request parameters. + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + as_json (bool): Whether to return the response as JSON. Defaults to True. + params (Union[TParams, None], optional): Query parameters for the request. Returns: - TEntityDict | str: The response from the request - - :raises: - HTTPStatus0Error: If the status code is 0. - - RequestMalformed: If the status code is 400. - - RequestUnauthorized: If the status code is 401. - - RequestForbidden: If the status code is 403. - - ObjectNotFound: If the status code is 404. - - ObjectAlreadyExists: If the status code is 409. - - ObjectUnprocessable: If the status code is 422. - - ServerError: If the status code is 500. - - ServiceUnavailable: If the status code is 503. - - TypesenseClientError: If the status code is not one of the above. + Union[TEntityDict, str]: The response, either as a JSON object or a string. """ - if params: - ApiCall.normalize_params(params) - return self.make_request( + return self._execute_request( session.post, endpoint, entity_type, - as_json=as_json, - data=body, + as_json, params=params, - timeout=self.config.connection_timeout_seconds, - verify=self.config.verify, + data=body, ) def put( self, endpoint: str, - entity_type: type[TEntityDict], + entity_type: typing.Type[TEntityDict], body: TBody, params: typing.Union[TParams, None] = None, ) -> TEntityDict: """ - Make a PUT request to the endpoint with the given parameters. + Execute a PUT request to the Typesense API. Args: - endpoint (str): The endpoint to make the request to. - body (TBody): The request body. - params (TParams | None): The request parameters. + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. 
+ params (Union[TParams, None], optional): Query parameters for the request. Returns: - TEntityDict: The response from the request in json format - - :raises: - HTTPStatus0Error: If the status code is 0. - - RequestMalformed: If the status code is 400. - - RequestUnauthorized: If the status code is 401. - - RequestForbidden: If the status code is 403. - - ObjectNotFound: If the status code is 404. - - ObjectAlreadyExists: If the status code is 409. - - ObjectUnprocessable: If the status code is 422. - - ServerError: If the status code is 500. - - ServiceUnavailable: If the status code is 503. - - TypesenseClientError: If the status code is not one of the above. + EntityDict: The response, as a JSON object. """ - return self.make_request( + return self._execute_request( session.put, endpoint, entity_type, as_json=True, params=params, data=body, - timeout=self.config.connection_timeout_seconds, - verify=self.config.verify, ) def patch( self, endpoint: str, - entity_type: type[TEntityDict], + entity_type: typing.Type[TEntityDict], body: TBody, - params: typing.Union[TParams , None ]= None, + params: typing.Union[TParams, None] = None, + ) -> TEntityDict: + """ + Execute a PATCH request to the Typesense API. + + Args: + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + params (Union[TParams, None], optional): Query parameters for the request. + + Returns: + EntityDict: The response, as a JSON object. + """ + return self._execute_request( + session.patch, + endpoint, + entity_type, + as_json=True, + params=params, + data=body, + ) + + def delete( + self, + endpoint: str, + entity_type: typing.Type[TEntityDict], + params: typing.Union[TParams, None] = None, ) -> TEntityDict: """ - Make a PATCH request to the endpoint with the given parameters. + Execute a DELETE request to the Typesense API. Args: - endpoint (str): The endpoint to make the request to. - body (TBody): The request body. 
- params (TParams | None): The request parameters. + endpoint (str): The API endpoint to call. + entity_type (Type[TEntityDict]): The expected type of the response entity. + params (Union[TParams, None], optional): Query parameters for the request. Returns: - TEntityDict: The response from the request in json format + EntityDict: The response, as a JSON object. + """ + return self._execute_request( + session.delete, + endpoint, + entity_type, + as_json=True, + params=params, + ) - :raises: - HTTPStatus0Error: If the status code is 0. + @typing.overload + def _execute_request( + self, + fn: typing.Callable[..., requests.models.Response], + endpoint: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Literal[True], + last_exception: typing.Union[None, Exception] = None, + num_retries: int = 0, + **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> TEntityDict: + """ + Execute a request to the Typesense API with retry logic. - RequestMalformed: If the status code is 400. + This method handles the actual execution of the request, including + node selection, error handling, and retries. - RequestUnauthorized: If the status code is 401. + Args: + fn (Callable): The HTTP method function to use (e.g., session.get). + + endpoint (str): The API endpoint to call. - RequestForbidden: If the status code is 403. + entity_type (Type[TEntityDict]): The expected type of the response entity. - ObjectNotFound: If the status code is 404. + as_json (bool): Whether to return the response as JSON. Defaults to True. - ObjectAlreadyExists: If the status code is 409. + last_exception (Union[None, Exception], optional): The last exception encountered. - ObjectUnprocessable: If the status code is 422. + num_retries (int): The current number of retries attempted. - ServerError: If the status code is 500. + kwargs: Additional keyword arguments for the request. - ServiceUnavailable: If the status code is 503. 
+ Returns: + TEntityDict: The response, as a JSON object. - TypesenseClientError: If the status code is not one of the above. + Raises: + TypesenseClientError: If all nodes are unhealthy or max retries are exceeded. """ - return self.make_request( - session.patch, - endpoint, - entity_type, - as_json=True, - params=params, - data=body, - timeout=self.config.connection_timeout_seconds, - verify=self.config.verify, - ) - def delete( + @typing.overload + def _execute_request( self, + fn: typing.Callable[..., requests.models.Response], endpoint: str, - entity_type: type[TEntityDict], - params: typing.Union[TParams , None ]= None, - ) -> TEntityDict: + entity_type: typing.Type[TEntityDict], + as_json: typing.Literal[False], + last_exception: typing.Union[None, Exception] = None, + num_retries: int = 0, + **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> str: """ - Make a DELETE request to the endpoint with the given parameters. + Execute a request to the Typesense API with retry logic. + + This method handles the actual execution of the request, including + node selection, error handling, and retries. Args: - endpoint (str): The endpoint to make the request to. - params (TParams | None): The request parameters. + fn (Callable): The HTTP method function to use (e.g., session.get). + + endpoint (str): The API endpoint to call. + + entity_type (Type[TEntityDict]): The expected type of the response entity. + + as_json (bool): Whether to return the response as JSON. Defaults to True. + + last_exception (Union[None, Exception], optional): The last exception encountered. + + num_retries (int): The current number of retries attempted. + + kwargs: Additional keyword arguments for the request. Returns: - TEntityDict: The response from the request in json format + str: The response, as a string. - :raises: - HTTPStatus0Error: If the status code is 0. + Raises: + TypesenseClientError: If all nodes are unhealthy or max retries are exceeded. 
+ """ - RequestMalformed: If the status code is 400. + def _execute_request( + self, + fn: typing.Callable[..., requests.models.Response], + endpoint: str, + entity_type: typing.Type[TEntityDict], + as_json: typing.Union[typing.Literal[True], typing.Literal[False]] = True, + last_exception: typing.Union[None, Exception] = None, + num_retries: int = 0, + **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> typing.Union[TEntityDict, str]: + """ + Execute a request to the Typesense API with retry logic. - RequestUnauthorized: If the status code is 401. + This method handles the actual execution of the request, including + node selection, error handling, and retries. - RequestForbidden: If the status code is 403. + Args: + fn (Callable): The HTTP method function to use (e.g., session.get). - ObjectNotFound: If the status code is 404. + endpoint (str): The API endpoint to call. - ObjectAlreadyExists: If the status code is 409. + entity_type (Type[TEntityDict]): The expected type of the response entity. - ObjectUnprocessable: If the status code is 422. + as_json (bool): Whether to return the response as JSON. Defaults to True. - ServerError: If the status code is 500. + last_exception (Union[None, Exception], optional): The last exception encountered. - ServiceUnavailable: If the status code is 503. + num_retries (int): The current number of retries attempted. - TypesenseClientError: If the status code is not one of the above. + kwargs: Additional keyword arguments for the request. + + Returns: + Union[TEntityDict, str]: The response, either as a JSON object or a string. + + Raises: + TypesenseClientError: If all nodes are unhealthy or max retries are exceeded. 
""" - return self.make_request( - session.delete, - endpoint, - entity_type, - as_json=True, - params=params, - timeout=self.config.connection_timeout_seconds, - verify=self.config.verify, + if num_retries > self.config.num_retries: + if last_exception: + raise last_exception + raise TypesenseClientError("All nodes are unhealthy") + + node, url, kwargs = self._prepare_request_params(endpoint, **kwargs) + + try: + return self._make_request_and_process_response( + fn, + url, + entity_type, + as_json, + **kwargs, + ) + except _SERVER_ERRORS as server_error: + self.node_manager.set_node_health(node, is_healthy=False) + return self._execute_request( + fn, + endpoint, + entity_type, + as_json, + last_exception=server_error, + num_retries=num_retries + 1, + **kwargs, + ) + + def _make_request_and_process_response( + self, + fn: typing.Callable[..., requests.models.Response], + url: str, + entity_type: typing.Type[TEntityDict], + as_json: bool, + **kwargs: typing.Any, + ) -> typing.Union[TEntityDict, str]: + """Make the API request and process the response.""" + request_response = self.request_handler.make_request( + fn=fn, + url=url, + as_json=as_json, + entity_type=entity_type, + **kwargs, + ) + self.node_manager.set_node_health(self.node_manager.get_node(), is_healthy=True) + return ( + typing.cast(TEntityDict, request_response) + if as_json + else typing.cast(str, request_response) ) - def _initialize_nodes(self) -> None: - if self.config.nearest_node: - self.set_node_healthcheck(self.config.nearest_node, is_healthy=True) + def _prepare_request_params( + self, + endpoint: str, + **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + ) -> typing.Tuple[Node, str, SessionFunctionKwargs[TParams, TBody]]: + node = self.node_manager.get_node() + url = node.url() + endpoint + + if kwargs.get("params"): + self.request_handler.normalize_params(kwargs["params"]) - for node in self.nodes: - self.set_node_healthcheck(node, is_healthy=True) + return node, url, kwargs 
diff --git a/tests/alias_test.py b/tests/alias_test.py index e1b514c..b3a74b6 100644 --- a/tests/alias_test.py +++ b/tests/alias_test.py @@ -21,7 +21,10 @@ def test_init(fake_api_call: ApiCall) -> None: assert alias.name == "company_alias" assert_match_object(alias.api_call, fake_api_call) - assert_object_lists_match(alias.api_call.nodes, fake_api_call.nodes) + assert_object_lists_match( + alias.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) assert_match_object( alias.api_call.config.nearest_node, fake_api_call.config.nearest_node, diff --git a/tests/aliases_test.py b/tests/aliases_test.py index 3868bcf..314bf97 100644 --- a/tests/aliases_test.py +++ b/tests/aliases_test.py @@ -19,7 +19,10 @@ def test_init(fake_api_call: ApiCall) -> None: aliases = Aliases(fake_api_call) assert_match_object(aliases.api_call, fake_api_call) - assert_object_lists_match(aliases.api_call.nodes, fake_api_call.nodes) + assert_object_lists_match( + aliases.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) assert_match_object( aliases.api_call.config.nearest_node, fake_api_call.config.nearest_node, @@ -34,7 +37,9 @@ def test_get_missing_alias(fake_aliases: Aliases) -> None: assert alias.name == "company_alias" assert_match_object(alias.api_call, fake_aliases.api_call) - assert_object_lists_match(alias.api_call.nodes, fake_aliases.api_call.nodes) + assert_object_lists_match( + alias.api_call.node_manager.nodes, fake_aliases.api_call.node_manager.nodes + ) assert_match_object( alias.api_call.config.nearest_node, fake_aliases.api_call.config.nearest_node, diff --git a/tests/analytics_rule_test.py b/tests/analytics_rule_test.py index afa8a99..4141c55 100644 --- a/tests/analytics_rule_test.py +++ b/tests/analytics_rule_test.py @@ -17,7 +17,10 @@ def test_init(fake_api_call: ApiCall) -> None: assert analytics_rule.rule_id == "company_analytics_rule" assert_match_object(analytics_rule.api_call, fake_api_call) - 
assert_object_lists_match(analytics_rule.api_call.nodes, fake_api_call.nodes) + assert_object_lists_match( + analytics_rule.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) assert_match_object( analytics_rule.api_call.config.nearest_node, fake_api_call.config.nearest_node, diff --git a/tests/analytics_rules_test.py b/tests/analytics_rules_test.py index dd545f4..edad1d8 100644 --- a/tests/analytics_rules_test.py +++ b/tests/analytics_rules_test.py @@ -18,7 +18,10 @@ def test_init(fake_api_call: ApiCall) -> None: analytics_rules = AnalyticsRules(fake_api_call) assert_match_object(analytics_rules.api_call, fake_api_call) - assert_object_lists_match(analytics_rules.api_call.nodes, fake_api_call.nodes) + assert_object_lists_match( + analytics_rules.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) assert_match_object( analytics_rules.api_call.config.nearest_node, fake_api_call.config.nearest_node, @@ -34,8 +37,8 @@ def test_get_missing_analytics_rule(fake_analytics_rules: AnalyticsRules) -> Non assert analytics_rule.rule_id == "company_analytics_rule" assert_match_object(analytics_rule.api_call, fake_analytics_rules.api_call) assert_object_lists_match( - analytics_rule.api_call.nodes, - fake_analytics_rules.api_call.nodes, + analytics_rule.api_call.node_manager.nodes, + fake_analytics_rules.api_call.node_manager.nodes, ) assert_match_object( analytics_rule.api_call.config.nearest_node, diff --git a/tests/analytics_test.py b/tests/analytics_test.py index 11790e5..e2e4441 100644 --- a/tests/analytics_test.py +++ b/tests/analytics_test.py @@ -10,7 +10,10 @@ def test_init(fake_api_call: ApiCall) -> None: analytics = Analytics(fake_api_call) assert_match_object(analytics.rules.api_call, fake_api_call) - assert_object_lists_match(analytics.rules.api_call.nodes, fake_api_call.nodes) + assert_object_lists_match( + analytics.rules.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) assert_match_object( 
analytics.rules.api_call.config.nearest_node, fake_api_call.config.nearest_node, diff --git a/tests/api_call_test.py b/tests/api_call_test.py index 63a6b3a..e49fb4c 100644 --- a/tests/api_call_test.py +++ b/tests/api_call_test.py @@ -20,7 +20,7 @@ from tests.utils.object_assertions import assert_match_object, assert_object_lists_match from typesense import exceptions -from typesense.api_call import ApiCall +from typesense.api_call import ApiCall, RequestHandler from typesense.configuration import Configuration, Node from typesense.logger import logger @@ -76,8 +76,8 @@ def test_initialization( ) -> None: """Test the initialization of the ApiCall object.""" assert api_call.config == config - assert_object_lists_match(api_call.nodes, config.nodes) - assert api_call.node_index == 0 + assert_object_lists_match(api_call.node_manager.nodes, config.nodes) + assert api_call.node_manager.node_index == 0 def test_node_due_for_health_check( @@ -86,14 +86,14 @@ def test_node_due_for_health_check( """Test that it correctly identifies if a node is due for health check.""" node = Node(host="localhost", port=8108, protocol="http", path=" ") node.last_access_ts = time.time() - 61 - assert api_call.node_due_for_health_check(node) is True + assert api_call.node_manager._is_due_for_health_check(node) is True def test_get_node_nearest_healthy( api_call: ApiCall, ) -> None: """Test that it correctly selects the nearest node if it is healthy.""" - node = api_call.get_node() + node = api_call.node_manager.get_node() assert_match_object(node, api_call.config.nearest_node) @@ -102,8 +102,8 @@ def test_get_node_nearest_not_healthy( ) -> None: """Test that it selects the next available node if the nearest node is not healthy.""" api_call.config.nearest_node.healthy = False - node = api_call.get_node() - assert_match_object(node, api_call.nodes[0]) + node = api_call.node_manager.get_node() + assert_match_object(node, api_call.node_manager.nodes[0]) def test_get_node_round_robin_selection( @@ 
-114,34 +114,34 @@ def test_get_node_round_robin_selection( api_call.config.nearest_node = None mocker.patch("time.time", return_value=100) - node1 = api_call.get_node() + node1 = api_call.node_manager.get_node() assert_match_object(node1, api_call.config.nodes[0]) - node2 = api_call.get_node() + node2 = api_call.node_manager.get_node() assert_match_object(node2, api_call.config.nodes[1]) - node3 = api_call.get_node() + node3 = api_call.node_manager.get_node() assert_match_object(node3, api_call.config.nodes[2]) def test_get_exception() -> None: """Test that it correctly returns the exception class for a given status code.""" - assert ApiCall.get_exception(0) == exceptions.HTTPStatus0Error - assert ApiCall.get_exception(400) == exceptions.RequestMalformed - assert ApiCall.get_exception(401) == exceptions.RequestUnauthorized - assert ApiCall.get_exception(403) == exceptions.RequestForbidden - assert ApiCall.get_exception(404) == exceptions.ObjectNotFound - assert ApiCall.get_exception(409) == exceptions.ObjectAlreadyExists - assert ApiCall.get_exception(422) == exceptions.ObjectUnprocessable - assert ApiCall.get_exception(500) == exceptions.ServerError - assert ApiCall.get_exception(503) == exceptions.ServiceUnavailable - assert ApiCall.get_exception(999) == exceptions.TypesenseClientError + assert RequestHandler._get_exception(0) == exceptions.HTTPStatus0Error + assert RequestHandler._get_exception(400) == exceptions.RequestMalformed + assert RequestHandler._get_exception(401) == exceptions.RequestUnauthorized + assert RequestHandler._get_exception(403) == exceptions.RequestForbidden + assert RequestHandler._get_exception(404) == exceptions.ObjectNotFound + assert RequestHandler._get_exception(409) == exceptions.ObjectAlreadyExists + assert RequestHandler._get_exception(422) == exceptions.ObjectUnprocessable + assert RequestHandler._get_exception(500) == exceptions.ServerError + assert RequestHandler._get_exception(503) == exceptions.ServiceUnavailable + assert 
RequestHandler._get_exception(999) == exceptions.TypesenseClientError def test_normalize_params_with_booleans() -> None: """Test that it correctly normalizes boolean values to strings.""" parameter_dict: typing.Dict[str, str | bool] = {"key1": True, "key2": False} - ApiCall.normalize_params(parameter_dict) + RequestHandler.normalize_params(parameter_dict) assert parameter_dict == {"key1": "true", "key2": "false"} @@ -151,13 +151,13 @@ def test_normalize_params_with_non_dict() -> None: parameter_non_dict = "string" with pytest.raises(ValueError): - ApiCall.normalize_params(parameter_non_dict) + RequestHandler.normalize_params(parameter_non_dict) def test_normalize_params_with_mixed_types() -> None: """Test that it correctly normalizes boolean values to strings.""" parameter_dict = {"key1": True, "key2": False, "key3": "value", "key4": 123} - ApiCall.normalize_params(parameter_dict) + RequestHandler.normalize_params(parameter_dict) assert parameter_dict == { "key1": "true", "key2": "false", @@ -169,14 +169,14 @@ def test_normalize_params_with_mixed_types() -> None: def test_normalize_params_with_empty_dict() -> None: """Test that it correctly normalizes an empty dictionary.""" parameter_dict: typing.Dict[str, str] = {} - ApiCall.normalize_params(parameter_dict) + RequestHandler.normalize_params(parameter_dict) assert not parameter_dict def test_normalize_params_with_no_booleans() -> None: """Test that it correctly normalizes a dictionary with no boolean values.""" parameter_dict = {"key1": "value", "key2": 123} - ApiCall.normalize_params(parameter_dict) + RequestHandler.normalize_params(parameter_dict) assert parameter_dict == {"key1": "value", "key2": 123} @@ -191,7 +191,7 @@ def test_make_request_as_json(api_call: ApiCall) -> None: status_code=200, ) - response = api_call.make_request( + response = api_call._execute_request( session.get, "/test", as_json=True, @@ -211,7 +211,7 @@ def test_make_request_as_text(api_call: ApiCall) -> None: status_code=200, ) - 
response = api_call.make_request( + response = api_call._execute_request( session.get, "/test", as_json=False, @@ -387,7 +387,7 @@ def test_raise_custom_exception_with_header( ) with pytest.raises(exceptions.RequestMalformed) as exception: - api_call.make_request( + api_call._execute_request( requests.get, "/test", as_json=True, @@ -408,7 +408,7 @@ def test_raise_custom_exception_without_header( ) with pytest.raises(exceptions.RequestMalformed) as exception: - api_call.make_request( + api_call._execute_request( requests.get, "/test", as_json=True, @@ -456,24 +456,30 @@ def test_get_node_no_healthy_nodes( caplog: pytest.LogCaptureFixture, ) -> None: """Test that it logs a message if no healthy nodes are found.""" - for api_node in api_call.nodes: + for api_node in api_call.node_manager.nodes: api_node.healthy = False api_call.config.nearest_node.healthy = False - mocker.patch.object(api_call, "node_due_for_health_check", return_value=False) + mocker.patch.object( + api_call.node_manager, + "_is_due_for_health_check", + return_value=False, + ) # Need to set the logger level to DEBUG to capture the message logger.setLevel(logging.DEBUG) - selected_node = api_call.get_node() + selected_node = api_call.node_manager.get_node() with caplog.at_level(logging.DEBUG): assert "No healthy nodes were found. Returning the next node." 
in caplog.text - assert selected_node == api_call.nodes[api_call.node_index] + assert ( + selected_node == api_call.node_manager.nodes[api_call.node_manager.node_index] + ) - assert api_call.node_index == 0 + assert api_call.node_manager.node_index == 0 def test_raises_if_no_nodes_are_healthy_with_the_last_exception( @@ -579,3 +585,19 @@ def test_uses_nearest_node_if_present_and_healthy( # noqa: WPS213 assert request_mocker.request_history[11].url == "http://nearest:8108/" assert request_mocker.request_history[12].url == "http://nearest:8108/" assert request_mocker.request_history[13].url == "http://nearest:8108/" + + +def test_max_retries_no_last_exception(api_call: ApiCall) -> None: + """Test that it raises if the maximum number of retries is reached.""" + with pytest.raises( + exceptions.TypesenseClientError, + match="All nodes are unhealthy", + ): + api_call._execute_request( + requests.get, + "/", + as_json=True, + entity_type=typing.Dict[str, str], + num_retries=10, + last_exception=None, + ) diff --git a/tests/client_test.py b/tests/client_test.py index d7d032d..b25f9e9 100644 --- a/tests/client_test.py +++ b/tests/client_test.py @@ -12,7 +12,9 @@ def test_client_init(fake_config_dict: ConfigDict) -> None: assert fake_client.config == fake_client.api_call.config assert_match_object(fake_client.api_call.config, fake_client.config) - assert_object_lists_match(fake_client.api_call.nodes, fake_client.config.nodes) + assert_object_lists_match( + fake_client.api_call.node_manager.nodes, fake_client.config.nodes + ) assert_match_object( fake_client.api_call.config.nearest_node, fake_client.config.nearest_node, @@ -62,7 +64,9 @@ def test_retrieve_collection_actual( def test_retrieve_collection_actual_no_name( - actual_client: Client, delete_all: None, create_collection: None, + actual_client: Client, + delete_all: None, + 
create_collection: None, ) -> None: """Test that the client can retrieve an actual collection.""" collection = actual_client.typed_collection(model=Companies) diff --git a/tests/collection_test.py b/tests/collection_test.py index 0d0ef00..d292be3 100644 --- a/tests/collection_test.py +++ b/tests/collection_test.py @@ -23,7 +23,10 @@ def test_init(fake_api_call: ApiCall) -> None: assert collection.name == "companies" assert_match_object(collection.api_call, fake_api_call) - assert_object_lists_match(collection.api_call.nodes, fake_api_call.nodes) + assert_object_lists_match( + collection.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) assert_match_object( collection.api_call.config.nearest_node, fake_api_call.config.nearest_node, diff --git a/tests/collections_test.py b/tests/collections_test.py index b345740..7df3a60 100644 --- a/tests/collections_test.py +++ b/tests/collections_test.py @@ -22,7 +22,10 @@ def test_init(fake_api_call: ApiCall) -> None: collections = Collections(fake_api_call) assert_match_object(collections.api_call, fake_api_call) - assert_object_lists_match(collections.api_call.nodes, fake_api_call.nodes) + assert_object_lists_match( + collections.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) assert_match_object( collections.api_call.config.nearest_node, fake_api_call.config.nearest_node, @@ -37,8 +40,8 @@ def test_get_missing_collection(fake_collections: Collections) -> None: assert collection.name == "companies" assert_match_object(collection.api_call, fake_collections.api_call) assert_object_lists_match( - collection.api_call.nodes, - fake_collections.api_call.nodes, + collection.api_call.node_manager.nodes, + fake_collections.api_call.node_manager.nodes, ) assert_match_object( collection.api_call.config.nearest_node, diff --git a/tests/conversation_model_test.py b/tests/conversation_model_test.py index 4f46c7e..43b8bab 100644 --- a/tests/conversation_model_test.py +++ 
b/tests/conversation_model_test.py @@ -31,7 +31,10 @@ def test_init(fake_api_call: ApiCall) -> None: assert conversation_model.model_id == "conversation_model_id" assert_match_object(conversation_model.api_call, fake_api_call) - assert_object_lists_match(conversation_model.api_call.nodes, fake_api_call.nodes) + assert_object_lists_match( + conversation_model.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) assert_match_object( conversation_model.api_call.config.nearest_node, fake_api_call.config.nearest_node, diff --git a/tests/conversations_models_test.py b/tests/conversations_models_test.py index 8b6979f..32eb2b9 100644 --- a/tests/conversations_models_test.py +++ b/tests/conversations_models_test.py @@ -29,7 +29,10 @@ def test_init(fake_api_call: ApiCall) -> None: conversations_models = ConversationsModels(fake_api_call) assert_match_object(conversations_models.api_call, fake_api_call) - assert_object_lists_match(conversations_models.api_call.nodes, fake_api_call.nodes) + assert_object_lists_match( + conversations_models.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) assert_match_object( conversations_models.api_call.config.nearest_node, fake_api_call.config.nearest_node, @@ -49,8 +52,8 @@ def test_get_missing_conversations_model( fake_conversations_models.api_call, ) assert_object_lists_match( - conversations_model.api_call.nodes, - fake_conversations_models.api_call.nodes, + conversations_model.api_call.node_manager.nodes, + fake_conversations_models.api_call.node_manager.nodes, ) assert_match_object( conversations_model.api_call.config.nearest_node, diff --git a/tests/debug_test.py b/tests/debug_test.py index 5970b6e..d491d17 100644 --- a/tests/debug_test.py +++ b/tests/debug_test.py @@ -17,7 +17,10 @@ def test_init(fake_api_call: ApiCall) -> None: ) assert_match_object(debug.api_call, fake_api_call) - assert_object_lists_match(debug.api_call.nodes, fake_api_call.nodes) + assert_object_lists_match( + 
debug.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) assert_match_object( debug.api_call.config.nearest_node, fake_api_call.config.nearest_node, diff --git a/tests/document_test.py b/tests/document_test.py index 0392f1b..42e1cba 100644 --- a/tests/document_test.py +++ b/tests/document_test.py @@ -22,7 +22,10 @@ def test_init(fake_api_call: ApiCall) -> None: assert document.document_id == "0" assert document.collection_name == "companies" assert_match_object(document.api_call, fake_api_call) - assert_object_lists_match(document.api_call.nodes, fake_api_call.nodes) + assert_object_lists_match( + document.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) assert_match_object( document.api_call.config.nearest_node, fake_api_call.config.nearest_node, diff --git a/tests/documents_test.py b/tests/documents_test.py index 8112be1..f0572e6 100644 --- a/tests/documents_test.py +++ b/tests/documents_test.py @@ -28,7 +28,10 @@ def test_init(fake_api_call: ApiCall) -> None: documents = Documents(fake_api_call, "companies") assert_match_object(documents.api_call, fake_api_call) - assert_object_lists_match(documents.api_call.nodes, fake_api_call.nodes) + assert_object_lists_match( + documents.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) assert_match_object( documents.api_call.config.nearest_node, fake_api_call.config.nearest_node, @@ -42,7 +45,9 @@ def test_get_missing_document(fake_documents: Documents) -> None: document = fake_documents["1"] assert_match_object(document.api_call, fake_documents.api_call) - assert_object_lists_match(document.api_call.nodes, fake_documents.api_call.nodes) + assert_object_lists_match( + document.api_call.node_manager.nodes, fake_documents.api_call.node_manager.nodes + ) assert_match_object( document.api_call.config.nearest_node, fake_documents.api_call.config.nearest_node, diff --git a/tests/key_test.py b/tests/key_test.py index 4603f94..5c06e16 100644 --- a/tests/key_test.py +++ 
b/tests/key_test.py @@ -21,7 +21,10 @@ def test_init(fake_api_call: ApiCall) -> None: assert key.key_id == 3 assert_match_object(key.api_call, fake_api_call) - assert_object_lists_match(key.api_call.nodes, fake_api_call.nodes) + assert_object_lists_match( + key.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) assert_match_object( key.api_call.config.nearest_node, fake_api_call.config.nearest_node, diff --git a/tests/keys_test.py b/tests/keys_test.py index b3538c2..019d17d 100644 --- a/tests/keys_test.py +++ b/tests/keys_test.py @@ -25,7 +25,10 @@ def test_init(fake_api_call: ApiCall) -> None: keys = Keys(fake_api_call) assert_match_object(keys.api_call, fake_api_call) - assert_object_lists_match(keys.api_call.nodes, fake_api_call.nodes) + assert_object_lists_match( + keys.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) assert_match_object( keys.api_call.config.nearest_node, fake_api_call.config.nearest_node, @@ -39,7 +42,9 @@ def test_get_missing_key(fake_keys: Keys) -> None: key = fake_keys[1] assert_match_object(key.api_call, fake_keys.api_call) - assert_object_lists_match(key.api_call.nodes, fake_keys.api_call.nodes) + assert_object_lists_match( + key.api_call.node_manager.nodes, fake_keys.api_call.node_manager.nodes + ) assert_match_object( key.api_call.config.nearest_node, fake_keys.api_call.config.nearest_node, diff --git a/tests/multi_search_test.py b/tests/multi_search_test.py index cdd751a..59727a7 100644 --- a/tests/multi_search_test.py +++ b/tests/multi_search_test.py @@ -19,7 +19,10 @@ def test_init(fake_api_call: ApiCall) -> None: documents = MultiSearch(fake_api_call) assert_match_object(documents.api_call, fake_api_call) - assert_object_lists_match(documents.api_call.nodes, fake_api_call.nodes) + assert_object_lists_match( + documents.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) assert_match_object( documents.api_call.config.nearest_node, fake_api_call.config.nearest_node, diff --git 
a/tests/operations_test.py b/tests/operations_test.py index 12b5853..34bb74c 100644 --- a/tests/operations_test.py +++ b/tests/operations_test.py @@ -16,7 +16,10 @@ def test_init(fake_api_call: ApiCall) -> None: operations = Operations(fake_api_call) assert_match_object(operations.api_call, fake_api_call) - assert_object_lists_match(operations.api_call.nodes, fake_api_call.nodes) + assert_object_lists_match( + operations.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) assert_match_object( operations.api_call.config.nearest_node, fake_api_call.config.nearest_node, diff --git a/tests/override_test.py b/tests/override_test.py index 5cd8fb3..25b05fd 100644 --- a/tests/override_test.py +++ b/tests/override_test.py @@ -22,7 +22,10 @@ def test_init(fake_api_call: ApiCall) -> None: assert override.collection_name == "companies" assert override.override_id == "company_override" assert_match_object(override.api_call, fake_api_call) - assert_object_lists_match(override.api_call.nodes, fake_api_call.nodes) + assert_object_lists_match( + override.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) assert_match_object( override.api_call.config.nearest_node, fake_api_call.config.nearest_node, diff --git a/tests/overrides_test.py b/tests/overrides_test.py index 313963c..872fe54 100644 --- a/tests/overrides_test.py +++ b/tests/overrides_test.py @@ -19,7 +19,10 @@ def test_init(fake_api_call: ApiCall) -> None: overrides = Overrides(fake_api_call, "companies") assert_match_object(overrides.api_call, fake_api_call) - assert_object_lists_match(overrides.api_call.nodes, fake_api_call.nodes) + assert_object_lists_match( + overrides.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) assert_match_object( overrides.api_call.config.nearest_node, fake_api_call.config.nearest_node, @@ -34,7 +37,9 @@ def test_get_missing_override(fake_overrides: Overrides) -> None: assert override.override_id == "company_override" 
assert_match_object(override.api_call, fake_overrides.api_call) - assert_object_lists_match(override.api_call.nodes, fake_overrides.api_call.nodes) + assert_object_lists_match( + override.api_call.node_manager.nodes, fake_overrides.api_call.node_manager.nodes + ) assert_match_object( override.api_call.config.nearest_node, fake_overrides.api_call.config.nearest_node, diff --git a/tests/stopwords_set_test.py b/tests/stopwords_set_test.py index 4bc21f4..0ddcf9a 100644 --- a/tests/stopwords_set_test.py +++ b/tests/stopwords_set_test.py @@ -17,7 +17,10 @@ def test_init(fake_api_call: ApiCall) -> None: assert stopword_set.stopwords_set_id == "company_stopwords" assert_match_object(stopword_set.api_call, fake_api_call) - assert_object_lists_match(stopword_set.api_call.nodes, fake_api_call.nodes) + assert_object_lists_match( + stopword_set.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) assert_match_object( stopword_set.api_call.config.nearest_node, fake_api_call.config.nearest_node, diff --git a/tests/stopwords_test.py b/tests/stopwords_test.py index 4c12847..a7841d7 100644 --- a/tests/stopwords_test.py +++ b/tests/stopwords_test.py @@ -19,7 +19,10 @@ def test_init(fake_api_call: ApiCall) -> None: stopwords = Stopwords(fake_api_call) assert_match_object(stopwords.api_call, fake_api_call) - assert_object_lists_match(stopwords.api_call.nodes, fake_api_call.nodes) + assert_object_lists_match( + stopwords.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) assert_match_object( stopwords.api_call.config.nearest_node, fake_api_call.config.nearest_node, @@ -34,7 +37,9 @@ def test_get_missing_stopword(fake_stopwords: Stopwords) -> None: assert stopword.stopwords_set_id == "company_stopwords" assert_match_object(stopword.api_call, fake_stopwords.api_call) - assert_object_lists_match(stopword.api_call.nodes, fake_stopwords.api_call.nodes) + assert_object_lists_match( + stopword.api_call.node_manager.nodes, 
fake_stopwords.api_call.node_manager.nodes + ) assert_match_object( stopword.api_call.config.nearest_node, fake_stopwords.api_call.config.nearest_node, diff --git a/tests/synonym_test.py b/tests/synonym_test.py index 28a3e13..98caa08 100644 --- a/tests/synonym_test.py +++ b/tests/synonym_test.py @@ -22,7 +22,10 @@ def test_init(fake_api_call: ApiCall) -> None: assert synonym.collection_name == "companies" assert synonym.synonym_id == "company_synonym" assert_match_object(synonym.api_call, fake_api_call) - assert_object_lists_match(synonym.api_call.nodes, fake_api_call.nodes) + assert_object_lists_match( + synonym.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) assert_match_object( synonym.api_call.config.nearest_node, fake_api_call.config.nearest_node, diff --git a/tests/synonyms_test.py b/tests/synonyms_test.py index 2bedb27..2071dbc 100644 --- a/tests/synonyms_test.py +++ b/tests/synonyms_test.py @@ -19,7 +19,10 @@ def test_init(fake_api_call: ApiCall) -> None: synonyms = Synonyms(fake_api_call, "companies") assert_match_object(synonyms.api_call, fake_api_call) - assert_object_lists_match(synonyms.api_call.nodes, fake_api_call.nodes) + assert_object_lists_match( + synonyms.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) assert_match_object( synonyms.api_call.config.nearest_node, fake_api_call.config.nearest_node, @@ -34,7 +37,9 @@ def test_get_missing_synonym(fake_synonyms: Synonyms) -> None: assert synonym.synonym_id == "company_synonym" assert_match_object(synonym.api_call, fake_synonyms.api_call) - assert_object_lists_match(synonym.api_call.nodes, fake_synonyms.api_call.nodes) + assert_object_lists_match( + synonym.api_call.node_manager.nodes, fake_synonyms.api_call.node_manager.nodes + ) assert_match_object( synonym.api_call.config.nearest_node, fake_synonyms.api_call.config.nearest_node, From 64b865884a77f0cbd76a542c1d2e9a818b4c6ee5 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 10 Sep 2024 13:57:27 
+0300 Subject: [PATCH 184/288] refactor(preprocess): refactor stringify function in preprocess module - Rename 'val' parameter to 'argument' for clarity - Simplify conditional logic for boolean and integer handling --- src/typesense/preprocess.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/src/typesense/preprocess.py b/src/typesense/preprocess.py index 2a483f3..3d23f8e 100644 --- a/src/typesense/preprocess.py +++ b/src/typesense/preprocess.py @@ -24,13 +24,14 @@ StringifiedParamSchema: typing.TypeAlias = typing.Dict[str, str] -def stringify(val: _Types) -> str: - if not isinstance(val, (str, int, bool)): - raise InvalidParameter(f"Value {val} is not a string, integer, or boolean.") - if isinstance(val, bool) or isinstance(val, int): - return str(val).lower() - else: - return val +def stringify(argument: _Types) -> str: + if not isinstance(argument, (str, int, bool)): + raise InvalidParameter( + f"Value {argument} is not a string, integer, or boolean.", + ) + if isinstance(argument, (bool, int)): + return str(argument).lower() + return argument def process_param_list( From 35da21834ba06c9ec66b7386eb1262eaa4a71d9e Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 10 Sep 2024 14:10:32 +0300 Subject: [PATCH 185/288] refactor(preprocess): format preprocess module based on linting rules --- src/typesense/preprocess.py | 62 ++++++++++++++++++++++++++++++++----- 1 file changed, 54 insertions(+), 8 deletions(-) diff --git a/src/typesense/preprocess.py b/src/typesense/preprocess.py index 3d23f8e..b45db0c 100644 --- a/src/typesense/preprocess.py +++ b/src/typesense/preprocess.py @@ -1,3 +1,33 @@ +""" +Functionality for preprocessing parameters in the Typesense Python client library. + +This module contains utility functions for converting various data types to strings and +processing parameter lists and dictionaries. These functions are used to prepare +data for API requests to Typesense. 
+ +Key features: +- Convert individual values (int, str, bool) to strings +- Process lists of parameters into comma-separated strings +- Stringify search parameter dictionaries + +Functions: + stringify: Convert a single value to a string. + process_param_list: Convert a list of parameters to a comma-separated string. + stringify_search_params: Convert a dictionary of search parameters to strings. + +Types: + _ListTypes: Type alias for a list of strings, integers, or booleans. + _Types: Type alias for a single string, integer, or boolean. + ParamSchema: Type alias for a dictionary of search parameters. + StringifiedParamSchema: Type alias for a dictionary of stringified search parameters. + +Dependencies: + - typesense.exceptions: Provides InvalidParameter exception + - typing or typing_extensions: For type hinting + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + import sys from typesense.exceptions import InvalidParameter @@ -7,11 +37,8 @@ else: import typing_extensions as typing - _ListTypes = typing.List[typing.Union[str, int, bool]] - _Types = typing.Union[int, str, bool] - ParamSchema: typing.TypeAlias = typing.Dict[ str, typing.Union[ @@ -19,12 +46,30 @@ _ListTypes, ], ] - - StringifiedParamSchema: typing.TypeAlias = typing.Dict[str, str] def stringify(argument: _Types) -> str: + """ + Convert a single value to a string. + + Args: + argument (_Types): The value to be converted to a string. + + Returns: + str: The stringified version of the input. + + Raises: + InvalidParameter: If the input is not a string, integer, or boolean. 
+ + Examples: + >>> stringify(True) + 'true' + >>> stringify(42) + '42' + >>> stringify("Hello") + 'Hello' + """ if not isinstance(argument, (str, int, bool)): raise InvalidParameter( f"Value {argument} is not a string, integer, or boolean.", @@ -69,6 +114,9 @@ def stringify_search_params(parameter_dict: ParamSchema) -> StringifiedParamSche """ Convert the search parameters to strings. + This function takes a dictionary of search parameters and converts all values + to their string representations. List values are converted to comma-separated strings. + Args: parameter_dict (ParamSchema): The search parameters. @@ -86,8 +134,7 @@ def stringify_search_params(parameter_dict: ParamSchema) -> StringifiedParamSche >>> stringify_search_params({"a": [True, False, True], "b": [1, 2, 3]}) {"a": "true,false,true", "b": "1,2,3"} """ - stringified_params = {} - + stringified_params: StringifiedParamSchema = {} for key, param_value in parameter_dict.items(): if isinstance(param_value, list): stringified_params[key] = process_param_list(param_value) @@ -97,5 +144,4 @@ def stringify_search_params(parameter_dict: ParamSchema) -> StringifiedParamSche raise InvalidParameter( f"Value {param_value} is not a string, integer, or boolean", ) - return stringified_params From b2c8c18a43784e9aaa2711079286ea7f235d961d Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 10 Sep 2024 14:17:42 +0300 Subject: [PATCH 186/288] refactor(operations): format operations class based on linting rules --- src/typesense/operations.py | 177 +++++++++++++++++++++++++++++++----- 1 file changed, 156 insertions(+), 21 deletions(-) diff --git a/src/typesense/operations.py b/src/typesense/operations.py index a743c39..a2e7782 100644 --- a/src/typesense/operations.py +++ b/src/typesense/operations.py @@ -1,3 +1,21 @@ +""" +This module provides functionality for performing various operations in the Typesense API. 
+ +It contains the Operations class, which handles different API operations such as +health checks, snapshots, and configuration changes. + +Classes: + Operations: Manages various operations in the Typesense API. + +Dependencies: + - typesense.types.operations: + Provides type definitions for operation responses and parameters. + - typesense.api_call: Provides the ApiCall class for making API requests. + - typesense.configuration: Provides the Configuration class. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + import sys from typesense.types.operations import ( @@ -12,67 +30,149 @@ else: import typing_extensions as typing -from typesense import configuration from typesense.api_call import ApiCall -from typesense.configuration import Configuration -class Operations(object): - RESOURCE_PATH = "/operations" - HEALTH_PATH = "/health" - CONFIG_PATH = "/config" +class Operations: + """ + Manages various operations in the Typesense API. + + This class provides methods to perform different operations such as + health checks, snapshots, and configuration changes. + + Attributes: + resource_path (str): The base path for operations endpoints. + healht_path (str): The path for the health check endpoint. + config_path (str): The path for the configuration endpoint. + api_call (ApiCall): The ApiCall instance for making API requests. + """ + + resource_path: typing.Final[str] = "/operations" + healht_path: typing.Final[str] = "/health" + config_path: typing.Final[str] = "/config" def __init__(self, api_call: ApiCall): - self.api_call = api_call + """ + Initialize the Operations instance. - @staticmethod - def _endpoint_path(operation_name: str) -> str: - return "{0}/{1}".format(Operations.RESOURCE_PATH, operation_name) + Args: + api_call (ApiCall): The ApiCall instance for making API requests. 
+ """ + self.api_call = api_call @typing.overload def perform( self, operation_name: typing.Literal["vote"], query_params: None = None, - ) -> OperationResponse: ... + ) -> OperationResponse: + """ + Perform a vote operation. + + Args: + operation_name (Literal["vote"]): The name of the operation. + query_params (None, optional): Query parameters (not used for vote operation). + + Returns: + OperationResponse: The response from the vote operation. + """ @typing.overload def perform( self, operation_name: typing.Literal["db/compact"], query_params: None = None, - ) -> OperationResponse: ... + ) -> OperationResponse: + """ + Perform a database compaction operation. + + Args: + operation_name (Literal["db/compact"]): The name of the operation. + query_params (None, optional): Query parameters (not used for db/compact operation). + + Returns: + OperationResponse: The response from the database compaction operation. + """ @typing.overload def perform( self, operation_name: typing.Literal["cache/clear"], query_params: None = None, - ) -> OperationResponse: ... + ) -> OperationResponse: + """ + Perform a cache clear operation. + + Args: + operation_name (Literal["cache/clear"]): The name of the operation. + query_params (None, optional): + Query parameters (not used for cache/clear operation). + + Returns: + OperationResponse: The response from the cache clear operation. + """ @typing.overload def perform( self, operation_name: str, query_params: typing.Union[typing.Dict[str, str], None] = None, - ) -> OperationResponse: ... + ) -> OperationResponse: + """ + Perform a generic operation. + + Args: + operation_name (str): The name of the operation. + query_params (Union[Dict[str, str], None], optional): + Query parameters for the operation. + + Returns: + OperationResponse: The response from the operation. + """ @typing.overload def perform( self, operation_name: typing.Literal["snapshot"], query_params: SnapshotParameters, - ) -> OperationResponse: ... 
+ ) -> OperationResponse: + """ + Perform a snapshot operation. + + Args: + operation_name (Literal["snapshot"]): The name of the operation. + query_params (SnapshotParameters): Query parameters for the snapshot operation. + + Returns: + OperationResponse: The response from the snapshot operation. + """ def perform( self, operation_name: typing.Union[ - typing.Literal["snapshot, vote, db/compact, cache/clear"], str + typing.Literal["snapshot, vote, db/compact, cache/clear"], + str, ], query_params: typing.Union[ - SnapshotParameters, typing.Dict[str, str], None + SnapshotParameters, + typing.Dict[str, str], + None, ] = None, ) -> OperationResponse: + """ + Perform an operation on the Typesense API. + + This method is the actual implementation for all the overloaded perform methods. + + Args: + operation_name (Literal["snapshot, vote, db/compact, cache/clear"]): + The name of the operation to perform. + query_params (Union[SnapshotParameters, None], optional): + Query parameters for the operation. + + Returns: + OperationResponse: The response from the performed operation. + """ response: OperationResponse = self.api_call.post( self._endpoint_path(operation_name), params=query_params, @@ -82,8 +182,16 @@ def perform( return response def is_healthy(self) -> bool: + """ + Check if the Typesense server is healthy. + + Returns: + bool: True if the server is healthy, False otherwise. 
+ """ call_resp = self.api_call.get( - Operations.HEALTH_PATH, as_json=True, entity_type=HealthCheckResponse + Operations.healht_path, + as_json=True, + entity_type=HealthCheckResponse, ) if isinstance(call_resp, typing.Dict): is_ok: bool = call_resp.get("ok", False) @@ -92,13 +200,40 @@ def is_healthy(self) -> bool: return is_ok def toggle_slow_request_log( - self, data: LogSlowRequestsTimeParams + self, + log_slow_requests_time_params: LogSlowRequestsTimeParams, ) -> typing.Dict[str, typing.Union[str, bool]]: - data_dashed = {key.replace("_", "-"): value for key, value in data.items()} + """ + Toggle the slow request log configuration. + + Args: + log_slow_requests_time_params (LogSlowRequestsTimeParams): + Parameters for configuring slow request logging. + + Returns: + Dict[str, Union[str, bool]]: The response from the configuration change operation. + """ + data_dashed = { + key.replace("_", "-"): dashed_value + for key, dashed_value in log_slow_requests_time_params.items() + } response: typing.Dict[str, typing.Union[str, bool]] = self.api_call.post( - Operations.CONFIG_PATH, + Operations.config_path, as_json=True, entity_type=typing.Dict[str, typing.Union[str, bool]], body=data_dashed, ) return response + + @staticmethod + def _endpoint_path(operation_name: str) -> str: + """ + Generate the endpoint path for a given operation. + + Args: + operation_name (str): The name of the operation. + + Returns: + str: The full endpoint path for the operation. 
+ """ + return "/".join([Operations.resource_path, operation_name]) From 5b3aacb6d421a19d16dfd37a65b5e71cf2447743 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 10 Sep 2024 14:22:30 +0300 Subject: [PATCH 187/288] refactor(multi-search): format multi-search class based on linting rules --- src/typesense/multi_search.py | 63 +++++++++++++++++++++++++++++++++-- 1 file changed, 60 insertions(+), 3 deletions(-) diff --git a/src/typesense/multi_search.py b/src/typesense/multi_search.py index c48df5f..47a9fa9 100644 --- a/src/typesense/multi_search.py +++ b/src/typesense/multi_search.py @@ -1,3 +1,24 @@ +""" +This module provides functionality for performing multi-search operations in the Typesense API. + +It contains the MultiSearch class, which allows for executing multiple search queries +in a single API call. + +Classes: + MultiSearch: Manages multi-search operations in the Typesense API. + +Dependencies: + - typesense.api_call: Provides the ApiCall class for making API requests. + - typesense.preprocess: + Provides the stringify_search_params function for parameter processing. + - typesense.types.document: + Provides the MultiSearchCommonParameters type. + - typesense.types.multi_search: + Provides MultiSearchRequestSchema and MultiSearchResponse types. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + import sys from typesense.api_call import ApiCall @@ -11,10 +32,26 @@ import typing_extensions as typing -class MultiSearch(object): - RESOURCE_PATH = "/multi_search" +class MultiSearch: + """ + Manages multi-search operations in the Typesense API. + + This class provides methods to perform multiple search queries in a single API call. + + Attributes: + RESOURCE_PATH (str): The API endpoint path for multi-search operations. + api_call (ApiCall): The ApiCall instance for making API requests. 
+ """ + + resource_path: typing.Final[str] = "/multi_search" def __init__(self, api_call: ApiCall) -> None: + """ + Initialize the MultiSearch instance. + + Args: + api_call (ApiCall): The ApiCall instance for making API requests. + """ self.api_call = api_call def perform( @@ -22,13 +59,33 @@ def perform( search_queries: MultiSearchRequestSchema, common_params: typing.Union[MultiSearchCommonParameters, None] = None, ) -> MultiSearchResponse: + """ + Perform a multi-search operation. + + This method allows executing multiple search queries in a single API call. + It processes the search parameters, sends the request to the Typesense API, + and returns the multi-search response. + + Args: + search_queries (MultiSearchRequestSchema): + A dictionary containing the list of search queries to perform. + The dictionary should have a 'searches' key with a list of search + parameter dictionaries. + common_params (Union[MultiSearchCommonParameters, None], optional): + Common parameters to apply to all search queries. Defaults to None. + + Returns: + MultiSearchResponse: + The response from the multi-search operation, containing + the results of all search queries. 
+ """ stringified_search_params = [ stringify_search_params(search_params) for search_params in search_queries.get("searches") ] search_body = {"searches": stringified_search_params} response: MultiSearchResponse = self.api_call.post( - MultiSearch.RESOURCE_PATH, + MultiSearch.resource_path, body=search_body, params=common_params, as_json=True, From 15cbbd7f7606d12349dcc18c1267e285c678da96 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 10 Sep 2024 14:27:51 +0300 Subject: [PATCH 188/288] refactor(collection): format collection classes based on linting rules --- src/typesense/collection.py | 112 +++++++++++++++++++++++++++++------ src/typesense/collections.py | 104 +++++++++++++++++++++++++++++--- src/typesense/document.py | 2 +- src/typesense/documents.py | 2 +- src/typesense/override.py | 2 +- src/typesense/overrides.py | 2 +- src/typesense/synonym.py | 2 +- 7 files changed, 195 insertions(+), 31 deletions(-) diff --git a/src/typesense/collection.py b/src/typesense/collection.py index 9ba00f3..f648ebf 100644 --- a/src/typesense/collection.py +++ b/src/typesense/collection.py @@ -1,4 +1,22 @@ -from __future__ import annotations +""" +This module provides functionality for managing individual collections in the Typesense API. + +It contains the Collection class, which allows for retrieving, updating, and deleting +collections, as well as managing documents, overrides, and synonyms within a collection. + +Classes: + Collection: Manages operations on a single collection in the Typesense API. + +Dependencies: + - typesense.api_call: Provides the ApiCall class for making API requests. + - typesense.documents: Provides the Documents class for managing documents. + - typesense.overrides: Provides the Overrides class for managing overrides. + - typesense.synonyms: Provides the Synonyms class for managing synonyms. + - typesense.types.collection: Provides CollectionSchema and CollectionUpdateSchema types. 
+ - typesense.types.document: Provides DocumentSchema type. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" import sys @@ -10,36 +28,69 @@ import typing_extensions as typing from typesense.api_call import ApiCall +from typesense.documents import Documents +from typesense.overrides import Overrides +from typesense.synonyms import Synonyms from typesense.types.document import DocumentSchema -from .documents import Documents -from .overrides import Overrides -from .synonyms import Synonyms - TDoc = typing.TypeVar("TDoc", bound=DocumentSchema) class Collection(typing.Generic[TDoc]): + """ + Manages operations on a single collection in the Typesense API. + + This class provides methods to retrieve, update, and delete a collection, + as well as access to documents, overrides, and synonyms within the collection. + It is generic over the document type TDoc, which should be a subtype of DocumentSchema. + + Attributes: + name (str): The name of the collection. + api_call (ApiCall): The ApiCall instance for making API requests. + documents (Documents[TDoc]): Instance for managing documents in this collection. + overrides (Overrides): Instance for managing overrides in this collection. + synonyms (Synonyms): Instance for managing synonyms in this collection. + """ + def __init__(self, api_call: ApiCall, name: str): + """ + Initialize the Collection instance. + + Args: + api_call (ApiCall): The ApiCall instance for making API requests. + name (str): The name of the collection. 
+ """ self.name = name self.api_call = api_call - self.documents = Documents[TDoc](api_call, name) + self.documents: Documents[TDoc] = Documents(api_call, name) self.overrides = Overrides(api_call, name) self.synonyms = Synonyms(api_call, name) - @property - def _endpoint_path(self) -> str: - from typesense.collections import Collections - - return f"{Collections.RESOURCE_PATH}/{self.name}" - def retrieve(self) -> CollectionSchema: + """ + Retrieve the schema of this collection from Typesense. + + Returns: + CollectionSchema: The schema of the collection. + """ response: CollectionSchema = self.api_call.get( - endpoint=self._endpoint_path, entity_type=CollectionSchema, as_json=True + endpoint=self._endpoint_path, + entity_type=CollectionSchema, + as_json=True, ) return response def update(self, schema_change: CollectionUpdateSchema) -> CollectionUpdateSchema: + """ + Update the schema of this collection in Typesense. + + Args: + schema_change (CollectionUpdateSchema): + The changes to apply to the collection schema. + + Returns: + CollectionUpdateSchema: The updated schema of the collection. + """ response: CollectionUpdateSchema = self.api_call.patch( endpoint=self._endpoint_path, body=schema_change, @@ -47,11 +98,38 @@ def update(self, schema_change: CollectionUpdateSchema) -> CollectionUpdateSchem ) return response - # There's currently no parameters passed to Collection deletions, but ensuring future compatibility def delete( self, - params: typing.Union[typing.Dict[str, typing.Union[str, bool]], None] = None, + delete_parameters: typing.Union[ + typing.Dict[str, typing.Union[str, bool]], + None, + ] = None, ) -> CollectionSchema: - return self.api_call.delete( - self._endpoint_path, entity_type=CollectionSchema, params=params + """ + Delete this collection from Typesense. + + Args: + delete_parameters (Union[Dict[str, Union[str, bool]], None], optional): + Additional parameters for the delete operation. Defaults to None. 
+ + Returns: + CollectionSchema: The schema of the deleted collection. + """ + response: CollectionSchema = self.api_call.delete( + self._endpoint_path, + entity_type=CollectionSchema, + params=delete_parameters, ) + return response + + @property + def _endpoint_path(self) -> str: + """ + Get the API endpoint path for this collection. + + Returns: + str: The full endpoint path for the collection. + """ + from typesense.collections import Collections + + return "/".join([Collections.resource_path, self.name]) diff --git a/src/typesense/collections.py b/src/typesense/collections.py index 2f12c91..723b2de 100644 --- a/src/typesense/collections.py +++ b/src/typesense/collections.py @@ -1,8 +1,22 @@ -from __future__ import annotations +""" +This module provides functionality for managing collections in the Typesense API. + +It contains the Collections class, which allows for creating, retrieving, and +accessing individual collections. + +Classes: + Collections: Manages collections in the Typesense API. + +Dependencies: + - typesense.api_call: Provides the ApiCall class for making API requests. + - typesense.collection: Provides the Collection class for individual collection operations. + - typesense.types.collection: Provides CollectionCreateSchema and CollectionSchema types. + - typesense.types.document: Provides DocumentSchema type. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. 
+""" import sys -from email.policy import default -from typing import TYPE_CHECKING if sys.version_info >= (3, 11): import typing @@ -10,31 +24,90 @@ import typing_extensions as typing from typesense.api_call import ApiCall +from typesense.collection import Collection from typesense.types.collection import CollectionCreateSchema, CollectionSchema from typesense.types.document import DocumentSchema -from .collection import Collection - TDoc = typing.TypeVar("TDoc", bound=DocumentSchema) class Collections(typing.Generic[TDoc]): - RESOURCE_PATH = "/collections" + """ + Manages collections in the Typesense API. + + This class provides methods to create, retrieve, and access individual collections. + It is generic over the document type TDoc, which should be a subtype of DocumentSchema. + + Attributes: + resource_path (str): The API endpoint path for collections operations. + api_call (ApiCall): The ApiCall instance for making API requests. + collections (Dict[str, Collection[TDoc]]): + A dictionary of Collection instances, keyed by collection name. + """ + + resource_path: typing.Final[str] = "/collections" def __init__(self, api_call: ApiCall): + """ + Initialize the Collections instance. + + Args: + api_call (ApiCall): The ApiCall instance for making API requests. + """ self.api_call = api_call self.collections: typing.Dict[str, Collection[TDoc]] = {} def __getitem__(self, collection_name: str) -> Collection[TDoc]: + """ + Get or create a Collection instance for a given collection name. + + This method allows accessing collections using dictionary-like syntax. + If the Collection instance doesn't exist, it creates a new one. + + Args: + collection_name (str): The name of the collection to access. + + Returns: + Collection[TDoc]: The Collection instance for the specified collection name. 
+ + Example: + >>> collections = Collections(api_call) + >>> fruits_collection = collections['fruits'] + """ if not self.collections.get(collection_name): self.collections[collection_name] = Collection( - self.api_call, collection_name + self.api_call, + collection_name, ) return self.collections[collection_name] def create(self, schema: CollectionCreateSchema) -> CollectionSchema: + """ + Create a new collection in Typesense. + + Args: + schema (CollectionCreateSchema): + The schema defining the structure of the new collection. + + Returns: + CollectionSchema: + The schema of the created collection, as returned by the API. + + Example: + >>> collections = Collections(api_call) + >>> schema = { + ... "name": "companies", + ... "fields": [ + ... {"name": "company_name", "type": "string" }, + ... {"name": "num_employees", "type": "int32" }, + ... {"name": "country", "type": "string", "facet": True } + ... ], + ... "default_sorting_field": "num_employees" + ... } + >>> created_schema = collections.create(schema) + """ call: CollectionSchema = self.api_call.post( - endpoint=Collections.RESOURCE_PATH, + endpoint=Collections.resource_path, entity_type=CollectionSchema, as_json=True, body=schema, @@ -42,8 +115,21 @@ def create(self, schema: CollectionCreateSchema) -> CollectionSchema: return call def retrieve(self) -> typing.List[CollectionSchema]: + """ + Retrieve all collections from Typesense. + + Returns: + List[CollectionSchema]: + A list of schemas for all collections in the Typesense instance. + + Example: + >>> collections = Collections(api_call) + >>> all_collections = collections.retrieve() + >>> for collection in all_collections: + ... 
print(collection['name']) + """ call: typing.List[CollectionSchema] = self.api_call.get( - endpoint=Collections.RESOURCE_PATH, + endpoint=Collections.resource_path, as_json=True, entity_type=typing.List[CollectionSchema], ) diff --git a/src/typesense/document.py b/src/typesense/document.py index 7142170..a707ee4 100644 --- a/src/typesense/document.py +++ b/src/typesense/document.py @@ -127,7 +127,7 @@ def _endpoint_path(self) -> str: return "/".join( [ - Collections.RESOURCE_PATH, + Collections.resource_path, self.collection_name, Documents.resource_path, self.document_id, diff --git a/src/typesense/documents.py b/src/typesense/documents.py index 8c27338..2eb771c 100644 --- a/src/typesense/documents.py +++ b/src/typesense/documents.py @@ -396,7 +396,7 @@ def _endpoint_path(self, action: typing.Union[str, None] = None) -> str: action = action or "" return "/".join( [ - Collections.RESOURCE_PATH, + Collections.resource_path, self.collection_name, self.resource_path, action, diff --git a/src/typesense/override.py b/src/typesense/override.py index 651039f..478a6d8 100644 --- a/src/typesense/override.py +++ b/src/typesense/override.py @@ -95,7 +95,7 @@ def _endpoint_path(self) -> str: return "/".join( [ - Collections.RESOURCE_PATH, + Collections.resource_path, self.collection_name, Overrides.resource_path, self.override_id, diff --git a/src/typesense/overrides.py b/src/typesense/overrides.py index 89776e8..2674f42 100644 --- a/src/typesense/overrides.py +++ b/src/typesense/overrides.py @@ -141,7 +141,7 @@ def _endpoint_path(self, override_id: typing.Union[str, None] = None) -> str: return "/".join( [ - Collections.RESOURCE_PATH, + Collections.resource_path, self.collection_name, Overrides.resource_path, override_id, diff --git a/src/typesense/synonym.py b/src/typesense/synonym.py index 9d2ef9d..096affc 100644 --- a/src/typesense/synonym.py +++ b/src/typesense/synonym.py @@ -89,7 +89,7 @@ def _endpoint_path(self) -> str: return "/".join( [ - Collections.RESOURCE_PATH, + 
Collections.resource_path, self.collection_name, Synonyms.resource_path, self.synonym_id, From a2ff85bc7c09a98df3443335396867bb134ef06a Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 10 Sep 2024 16:31:25 +0300 Subject: [PATCH 189/288] refactor(client): format client based on linting rules --- src/typesense/client.py | 114 +++++++++++++++++++++++++++++++++++----- 1 file changed, 101 insertions(+), 13 deletions(-) diff --git a/src/typesense/client.py b/src/typesense/client.py index c4045da..557b45f 100644 --- a/src/typesense/client.py +++ b/src/typesense/client.py @@ -1,3 +1,30 @@ +""" +This module provides the main client interface for interacting with the Typesense API. + +It contains the Client class, which serves as the entry point for all Typesense operations, +integrating various components like collections, multi-search, keys, aliases, analytics, etc. + +Classes: + Client: The main client class for interacting with Typesense. + +Dependencies: + - typesense.aliases: Provides the Aliases class. + - typesense.analytics: Provides the Analytics class. + - typesense.api_call: Provides the ApiCall class for making API requests. + - typesense.collection: Provides the Collection class. + - typesense.collections: Provides the Collections class. + - typesense.configuration: Provides Configuration and ConfigDict types. + - typesense.conversations_models: Provides the ConversationsModels class. + - typesense.debug: Provides the Debug class. + - typesense.keys: Provides the Keys class. + - typesense.multi_search: Provides the MultiSearch class. + - typesense.operations: Provides the Operations class. + - typesense.stopwords: Provides the Stopwords class. + - typesense.types.document: Provides the DocumentSchema type. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. 
+""" + import sys from typesense.types.document import DocumentSchema @@ -7,25 +34,61 @@ else: import typing_extensions as typing +from typesense.aliases import Aliases +from typesense.analytics import Analytics +from typesense.api_call import ApiCall from typesense.collection import Collection - -from .aliases import Aliases -from .analytics import Analytics -from .api_call import ApiCall -from .collections import Collections -from .configuration import ConfigDict, Configuration -from .conversations_models import ConversationsModels -from .debug import Debug -from .keys import Keys -from .multi_search import MultiSearch -from .operations import Operations -from .stopwords import Stopwords +from typesense.collections import Collections +from typesense.configuration import ConfigDict, Configuration +from typesense.conversations_models import ConversationsModels +from typesense.debug import Debug +from typesense.keys import Keys +from typesense.multi_search import MultiSearch +from typesense.operations import Operations +from typesense.stopwords import Stopwords TDoc = typing.TypeVar("TDoc", bound=DocumentSchema) -class Client(object): +class Client: + """ + The main client class for interacting with Typesense. + + This class serves as the entry point for all Typesense operations. It initializes + and provides access to various components of the Typesense SDK, such as collections, + multi-search, keys, aliases, analytics, operations, debug, stopwords, + and conversation models. + + Attributes: + config (Configuration): The configuration object for the Typesense client. + api_call (ApiCall): The ApiCall instance for making API requests. + collections (Collections[DocumentSchema]): Instance for managing collections. + multi_search (MultiSearch): Instance for performing multi-search operations. + keys (Keys): Instance for managing API keys. + aliases (Aliases): Instance for managing collection aliases. + analytics (Analytics): Instance for analytics operations. 
+ operations (Operations): Instance for various Typesense operations. + debug (Debug): Instance for debug operations. + stopwords (Stopwords): Instance for managing stopwords. + conversations_models (ConversationsModels): Instance for managing conversation models. + """ + def __init__(self, config_dict: ConfigDict) -> None: + """ + Initialize the Client instance. + + Args: + config_dict (ConfigDict): + A dictionary containing the configuration for the Typesense client. + + Example: + >>> config = { + ... "api_key": "your_api_key", + ... "nodes": [{"host": "localhost", "port": "8108", "protocol": "http"}], + ... "connection_timeout_seconds": 2 + ... } + >>> client = Client(config) + """ self.config = Configuration(config_dict) self.api_call = ApiCall(self.config) self.collections: Collections[DocumentSchema] = Collections(self.api_call) @@ -44,6 +107,31 @@ def typed_collection( model: typing.Type[TDoc], name: typing.Union[str, None] = None, ) -> Collection[TDoc]: + """ + Get a Collection instance for a specific document model. + + This method allows retrieving a Collection instance typed to a specific document model. + If no name is provided, it uses the lowercase name of the model class as + the collection name. + + Args: + model (Type[TDoc]): The document model class. + name (Union[str, None], optional): + The name of the collection. If None, uses the lowercase model class name. + + Returns: + Collection[TDoc]: A Collection instance typed to the specified document model. + + Example: + >>> class Company(DocumentSchema): + ... name: str + ... num_employees: int + ... 
+ >>> client = Client(config) + >>> companies_collection = client.typed_collection(model=Company) + # This is equivalent to: + # companies_collection = client.typed_collection(model=Company, name="company") + """ if name is None: name = model.__name__.lower() collection: Collection[TDoc] = self.collections[name] From 91610139d1c4e0206c4248e27f347bf62c5f4904 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 10 Sep 2024 16:35:47 +0300 Subject: [PATCH 190/288] fix(tests): use global fixtures for api call tests --- tests/api_call_test.py | 183 ++++++++++++++++------------------------- 1 file changed, 69 insertions(+), 114 deletions(-) diff --git a/tests/api_call_test.py b/tests/api_call_test.py index e49fb4c..3ae8b34 100644 --- a/tests/api_call_test.py +++ b/tests/api_call_test.py @@ -25,103 +25,58 @@ from typesense.logger import logger -@pytest.fixture(scope="function", name="config") -def config_fixture() -> Configuration: - """Return a Configuration object with test values.""" - return Configuration( - config_dict={ - "api_key": "test-api-key", - "nodes": [ - { - "host": "node0", - "port": 8108, - "protocol": "http", - }, - { - "host": "node1", - "port": 8108, - "protocol": "http", - }, - { - "host": "node2", - "port": 8108, - "protocol": "http", - }, - ], - "nearest_node": { - "host": "nearest", - "port": 8108, - "protocol": "http", - }, - "num_retries": 3, - "healthcheck_interval_seconds": 60, - "retry_interval_seconds": 0.001, - "connection_timeout_seconds": 0.001, - "verify": True, - }, - ) - - -@pytest.fixture(scope="function", name="api_call") -def api_call_fixture( - config: Configuration, -) -> ApiCall: - """Return an ApiCall object with test values.""" - return ApiCall(config) - - def test_initialization( - api_call: ApiCall, - config: Configuration, + fake_config: Configuration, ) -> None: """Test the initialization of the ApiCall object.""" - assert api_call.config == config - assert_object_lists_match(api_call.node_manager.nodes, config.nodes) 
- assert api_call.node_manager.node_index == 0 + fake_api_call = ApiCall(fake_config) + assert fake_api_call.config == fake_config + assert_object_lists_match(fake_api_call.node_manager.nodes, fake_config.nodes) + assert fake_api_call.node_manager.node_index == 0 def test_node_due_for_health_check( - api_call: ApiCall, + fake_api_call: ApiCall, ) -> None: """Test that it correctly identifies if a node is due for health check.""" node = Node(host="localhost", port=8108, protocol="http", path=" ") node.last_access_ts = time.time() - 61 - assert api_call.node_manager._is_due_for_health_check(node) is True + assert fake_api_call.node_manager._is_due_for_health_check(node) is True def test_get_node_nearest_healthy( - api_call: ApiCall, + fake_api_call: ApiCall, ) -> None: """Test that it correctly selects the nearest node if it is healthy.""" - node = api_call.node_manager.get_node() - assert_match_object(node, api_call.config.nearest_node) + node = fake_api_call.node_manager.get_node() + assert_match_object(node, fake_api_call.config.nearest_node) def test_get_node_nearest_not_healthy( - api_call: ApiCall, + fake_api_call: ApiCall, ) -> None: """Test that it selects the next available node if the nearest node is not healthy.""" - api_call.config.nearest_node.healthy = False - node = api_call.node_manager.get_node() - assert_match_object(node, api_call.node_manager.nodes[0]) + fake_api_call.config.nearest_node.healthy = False + node = fake_api_call.node_manager.get_node() + assert_match_object(node, fake_api_call.node_manager.nodes[0]) def test_get_node_round_robin_selection( - api_call: ApiCall, + fake_api_call: ApiCall, mocker: MockerFixture, ) -> None: """Test that it selects the next available node in a round-robin fashion.""" - api_call.config.nearest_node = None + fake_api_call.config.nearest_node = None mocker.patch("time.time", return_value=100) - node1 = api_call.node_manager.get_node() - assert_match_object(node1, api_call.config.nodes[0]) + node1 = 
fake_api_call.node_manager.get_node() + assert_match_object(node1, fake_api_call.config.nodes[0]) - node2 = api_call.node_manager.get_node() - assert_match_object(node2, api_call.config.nodes[1]) + node2 = fake_api_call.node_manager.get_node() + assert_match_object(node2, fake_api_call.config.nodes[1]) - node3 = api_call.node_manager.get_node() - assert_match_object(node3, api_call.config.nodes[2]) + node3 = fake_api_call.node_manager.get_node() + assert_match_object(node3, fake_api_call.config.nodes[2]) def test_get_exception() -> None: @@ -180,7 +135,7 @@ def test_normalize_params_with_no_booleans() -> None: assert parameter_dict == {"key1": "value", "key2": 123} -def test_make_request_as_json(api_call: ApiCall) -> None: +def test_make_request_as_json(fake_api_call: ApiCall) -> None: """Test the `make_request` method with JSON response.""" session = requests.sessions.Session() @@ -191,7 +146,7 @@ def test_make_request_as_json(api_call: ApiCall) -> None: status_code=200, ) - response = api_call._execute_request( + response = fake_api_call._execute_request( session.get, "/test", as_json=True, @@ -200,7 +155,7 @@ def test_make_request_as_json(api_call: ApiCall) -> None: assert response == {"key": "value"} -def test_make_request_as_text(api_call: ApiCall) -> None: +def test_make_request_as_text(fake_api_call: ApiCall) -> None: """Test the `make_request` method with text response.""" session = requests.sessions.Session() @@ -211,7 +166,7 @@ def test_make_request_as_text(api_call: ApiCall) -> None: status_code=200, ) - response = api_call._execute_request( + response = fake_api_call._execute_request( session.get, "/test", as_json=False, @@ -221,7 +176,7 @@ def test_make_request_as_text(api_call: ApiCall) -> None: def test_get_as_json( - api_call: ApiCall, + fake_api_call: ApiCall, ) -> None: """Test the GET method with JSON response.""" with requests_mock.mock() as request_mocker: @@ -230,7 +185,7 @@ def test_get_as_json( json={"key": "value"}, status_code=200, ) - 
assert api_call.get( + assert fake_api_call.get( "/test", as_json=True, entity_type=typing.Dict[str, str], @@ -238,7 +193,7 @@ def test_get_as_json( def test_get_as_text( - api_call: ApiCall, + fake_api_call: ApiCall, ) -> None: """Test the GET method with text response.""" with requests_mock.mock() as request_mocker: @@ -248,13 +203,13 @@ def test_get_as_text( status_code=200, ) assert ( - api_call.get("/test", as_json=False, entity_type=typing.Dict[str, str]) + fake_api_call.get("/test", as_json=False, entity_type=typing.Dict[str, str]) == "response text" ) def test_post_as_json( - api_call: ApiCall, + fake_api_call: ApiCall, ) -> None: """Test the POST method with JSON response.""" with requests_mock.mock() as request_mocker: @@ -263,7 +218,7 @@ def test_post_as_json( json={"key": "value"}, status_code=200, ) - assert api_call.post( + assert fake_api_call.post( "/test", body={"data": "value"}, as_json=True, @@ -274,7 +229,7 @@ def test_post_as_json( def test_post_with_params( - api_call: ApiCall, + fake_api_call: ApiCall, ) -> None: """Test that the parameters are correctly passed to the request.""" with requests_mock.Mocker() as request_mocker: @@ -286,7 +241,7 @@ def test_post_with_params( parameter_set = {"key1": [True, False], "key2": False, "key3": "value"} - post_result = api_call.post( + post_result = fake_api_call.post( "/test", params=parameter_set, body={"key": "value"}, @@ -307,7 +262,7 @@ def test_post_with_params( def test_post_as_text( - api_call: ApiCall, + fake_api_call: ApiCall, ) -> None: """Test the POST method with text response.""" with requests_mock.mock() as request_mocker: @@ -316,7 +271,7 @@ def test_post_as_text( text="response text", status_code=200, ) - post_result = api_call.post( + post_result = fake_api_call.post( "/test", body={"data": "value"}, as_json=False, @@ -326,7 +281,7 @@ def test_post_as_text( def test_put_as_json( - api_call: ApiCall, + fake_api_call: ApiCall, ) -> None: """Test the PUT method with JSON response.""" with 
requests_mock.mock() as request_mocker: @@ -335,7 +290,7 @@ def test_put_as_json( json={"key": "value"}, status_code=200, ) - assert api_call.put( + assert fake_api_call.put( "/test", body={"data": "value"}, entity_type=typing.Dict[str, str], @@ -343,7 +298,7 @@ def test_put_as_json( def test_patch_as_json( - api_call: ApiCall, + fake_api_call: ApiCall, ) -> None: """Test the PATCH method with JSON response.""" with requests_mock.mock() as request_mocker: @@ -352,7 +307,7 @@ def test_patch_as_json( json={"key": "value"}, status_code=200, ) - assert api_call.patch( + assert fake_api_call.patch( "/test", body={"data": "value"}, entity_type=typing.Dict[str, str], @@ -360,7 +315,7 @@ def test_patch_as_json( def test_delete_as_json( - api_call: ApiCall, + fake_api_call: ApiCall, ) -> None: """Test the DELETE method with JSON response.""" with requests_mock.mock() as request_mocker: @@ -370,12 +325,12 @@ def test_delete_as_json( status_code=200, ) - response = api_call.delete("/test", entity_type=typing.Dict[str, str]) + response = fake_api_call.delete("/test", entity_type=typing.Dict[str, str]) assert response == {"key": "value"} def test_raise_custom_exception_with_header( - api_call: ApiCall, + fake_api_call: ApiCall, ) -> None: """Test that it raises a custom exception with the error message.""" with requests_mock.mock() as request_mocker: @@ -387,7 +342,7 @@ def test_raise_custom_exception_with_header( ) with pytest.raises(exceptions.RequestMalformed) as exception: - api_call._execute_request( + fake_api_call._execute_request( requests.get, "/test", as_json=True, @@ -397,7 +352,7 @@ def test_raise_custom_exception_with_header( def test_raise_custom_exception_without_header( - api_call: ApiCall, + fake_api_call: ApiCall, ) -> None: """Test that it raises a custom exception with the error message.""" with requests_mock.mock() as request_mocker: @@ -408,7 +363,7 @@ def test_raise_custom_exception_without_header( ) with pytest.raises(exceptions.RequestMalformed) as 
exception: - api_call._execute_request( + fake_api_call._execute_request( requests.get, "/test", as_json=True, @@ -418,11 +373,11 @@ def test_raise_custom_exception_without_header( def test_selects_next_available_node_on_timeout( - api_call: ApiCall, + fake_api_call: ApiCall, ) -> None: """Test that it selects the next available node if the request times out.""" with requests_mock.mock() as request_mocker: - api_call.config.nearest_node = None + fake_api_call.config.nearest_node = None request_mocker.get( "http://node0:8108/test", exc=requests.exceptions.ConnectTimeout, @@ -437,7 +392,7 @@ def test_selects_next_available_node_on_timeout( status_code=200, ) - response = api_call.get( + response = fake_api_call.get( "/test", as_json=True, entity_type=typing.Dict[str, str], @@ -451,18 +406,18 @@ def test_selects_next_available_node_on_timeout( def test_get_node_no_healthy_nodes( - api_call: ApiCall, + fake_api_call: ApiCall, mocker: MockFixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test that it logs a message if no healthy nodes are found.""" - for api_node in api_call.node_manager.nodes: + for api_node in fake_api_call.node_manager.nodes: api_node.healthy = False - api_call.config.nearest_node.healthy = False + fake_api_call.config.nearest_node.healthy = False mocker.patch.object( - api_call.node_manager, + fake_api_call.node_manager, "_is_due_for_health_check", return_value=False, ) @@ -470,20 +425,20 @@ def test_get_node_no_healthy_nodes( # Need to set the logger level to DEBUG to capture the message logger.setLevel(logging.DEBUG) - selected_node = api_call.node_manager.get_node() + selected_node = fake_api_call.node_manager.get_node() with caplog.at_level(logging.DEBUG): assert "No healthy nodes were found. Returning the next node." 
in caplog.text assert ( - selected_node == api_call.node_manager.nodes[api_call.node_manager.node_index] + selected_node == fake_api_call.node_manager.nodes[fake_api_call.node_manager.node_index] ) - assert api_call.node_manager.node_index == 0 + assert fake_api_call.node_manager.node_index == 0 def test_raises_if_no_nodes_are_healthy_with_the_last_exception( - api_call: ApiCall, + fake_api_call: ApiCall, ) -> None: """Test that it raises the last exception if no nodes are healthy.""" with requests_mock.mock() as request_mocker: @@ -496,12 +451,12 @@ def test_raises_if_no_nodes_are_healthy_with_the_last_exception( request_mocker.get("http://node2:8108/", exc=requests.exceptions.SSLError) with pytest.raises(requests.exceptions.SSLError): - api_call.get("/", entity_type=typing.Dict[str, str]) + fake_api_call.get("/", entity_type=typing.Dict[str, str]) def test_uses_nearest_node_if_present_and_healthy( # noqa: WPS213 mocker: MockerFixture, - api_call: ApiCall, + fake_api_call: ApiCall, ) -> None: """Test that it uses the nearest node if it is present and healthy.""" with requests_mock.Mocker() as request_mocker: @@ -527,15 +482,15 @@ def test_uses_nearest_node_if_present_and_healthy( # noqa: WPS213 # 2 should go to node0, # 3 should go to node1, # 4 should go to node2 and resolve the request: 4 requests - api_call.get("/", entity_type=typing.Dict[str, str]) + fake_api_call.get("/", entity_type=typing.Dict[str, str]) # 1 should go to node2 and resolve the request: 1 request - api_call.get("/", entity_type=typing.Dict[str, str]) + fake_api_call.get("/", entity_type=typing.Dict[str, str]) # 1 should go to node2 and resolve the request: 1 request - api_call.get("/", entity_type=typing.Dict[str, str]) + fake_api_call.get("/", entity_type=typing.Dict[str, str]) # Advance time by 5 seconds mocker.patch("time.time", return_value=current_time + 5) - api_call.get( + fake_api_call.get( "/", entity_type=typing.Dict[str, str], ) # 1 
should go to node2 and resolve the request: 1 request @@ -547,7 +502,7 @@ def test_uses_nearest_node_if_present_and_healthy( # noqa: WPS213 # 2 should go to node0, # 3 should go to node1, # 4 should go to node2 and resolve the request: 4 requests - api_call.get("/", entity_type=typing.Dict[str, str]) + fake_api_call.get("/", entity_type=typing.Dict[str, str]) # Advance time by 185 seconds mocker.patch("time.time", return_value=current_time + 185) @@ -560,11 +515,11 @@ def test_uses_nearest_node_if_present_and_healthy( # noqa: WPS213 ) # 1 should go to nearest and resolve the request: 1 request - api_call.get("/", entity_type=typing.Dict[str, str]) + fake_api_call.get("/", entity_type=typing.Dict[str, str]) # 1 should go to nearest and resolve the request: 1 request - api_call.get("/", entity_type=typing.Dict[str, str]) + fake_api_call.get("/", entity_type=typing.Dict[str, str]) # 1 should go to nearest and resolve the request: 1 request - api_call.get("/", entity_type=typing.Dict[str, str]) + fake_api_call.get("/", entity_type=typing.Dict[str, str]) # Check the request history assert request_mocker.request_history[0].url == "http://nearest:8108/" @@ -587,13 +542,13 @@ def test_uses_nearest_node_if_present_and_healthy( # noqa: WPS213 assert request_mocker.request_history[13].url == "http://nearest:8108/" -def test_max_retries_no_last_exception(api_call: ApiCall) -> None: +def test_max_retries_no_last_exception(fake_api_call: ApiCall) -> None: """Test that it raises if the maximum number of retries is reached.""" with pytest.raises( exceptions.TypesenseClientError, match="All nodes are unhealthy", ): - api_call._execute_request( + fake_api_call._execute_request( requests.get, "/", as_json=True, From fc5d0d340f65dec423d61b509635c92c5db5058d Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 10 Sep 2024 16:36:40 +0300 Subject: [PATCH 191/288] style(test): use double quotes 
for config tests --- tests/configuration_validations_test.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/configuration_validations_test.py b/tests/configuration_validations_test.py index 99c6202..d408e05 100644 --- a/tests/configuration_validations_test.py +++ b/tests/configuration_validations_test.py @@ -44,7 +44,7 @@ def test_deprecation_warning_timeout_seconds(caplog: pytest.LogCaptureFixture) - } ConfigurationValidations.show_deprecation_warnings(config_dict) assert ( - ' '.join( + " ".join( [ "Deprecation warning: timeout_seconds is now renamed", "to connection_timeout_seconds", @@ -180,7 +180,7 @@ def test_validate_config_dict_with_wrong_nearest_node() -> None: """Test validate_config_dict with wrong nearest node.""" with pytest.raises( ConfigError, - match='`nearest_node` entry must be a URL string or a dictionary with the following required keys: host, port, protocol', # noqa: B950 + match="`nearest_node` entry must be a URL string or a dictionary with the following required keys: host, port, protocol", # noqa: B950 ): ConfigurationValidations.validate_config_dict( { From e4a59681fa45b8d1d553acbd382e8c8050b596ca Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 10 Sep 2024 16:36:50 +0300 Subject: [PATCH 192/288] chore: add docstrings to modules --- src/typesense/types/__init__.py | 1 + tests/__init__.py | 1 + 2 files changed, 2 insertions(+) diff --git a/src/typesense/types/__init__.py b/src/typesense/types/__init__.py index e69de29..d0c03eb 100644 --- a/src/typesense/types/__init__.py +++ b/src/typesense/types/__init__.py @@ -0,0 +1 @@ +"""Types for the Typesense Python Client.""" diff --git a/tests/__init__.py b/tests/__init__.py index e69de29..b12da74 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -0,0 +1 @@ +"""Tests for the Typesense Python Client.""" From f42a9201035e10479d69ec20117b11b210868e26 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 10 Sep 2024 16:37:34 +0300 Subject: [PATCH 
193/288] chore: add flake8 config --- setup.cfg | 30 +++++++++++++++++++++++++++++- 1 file changed, 29 insertions(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 6ba6a5a..ccbbe25 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,5 +1,33 @@ [flake8] -max-line-length = 160 +# flake8 configuration: +# https://flake8.pycqa.org/en/latest/user/configuration.html +format = wemake +show-source = true +statistics = false +doctests = true +enable-extensions = G +max-line-length = 88 +extend-select = B950 +extend-ignore = E203,E501,E701 + +# darglint configuration: +# https://github.com/terrencepreilly/darglint +strictness = long +docstring-style = sphinx + +# Flake plugins: +max-complexity = 6 + +# # Excluding some directories: +exclude = .git,__pycache__,venv,.eggs,*.egg +ignore = Q000, WPS602, WPS432, WPS305, WPS221, WPS230, WPS234, WPS433, WPS440, W503, WPS331, WPS306, WPS237, WPS202, RST301, RST306, WPS214, WPS235, WPS226, WPS337, WPS320, F821, WPS201 +per-file-ignores = + tests/*.py: S101, WPS226, WPS118, WPS202, WPS204, WPS218, WPS211, WPS604, WPS431, WPS210, WPS201, WPS437 + src/typesense/types/*.py: B950, WPS215, WPS111, WPS462, WPS322, WPS428, WPS114, WPS110, WPS202 + src/typesense/documents.py: WPS320, E704, D102, WPS428, WPS220 + src/typesense/api_call.py: WPS110, WPS211 + src/typesense/request_handler.py: WPS110, WPS211 + [metadata] license_file = LICENSE From dccc250db559d39a4da4d417b69ccdf73b2aeadf Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 10 Sep 2024 16:37:46 +0300 Subject: [PATCH 194/288] chore: add coverage config --- pyproject.toml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 031da77..2c58e45 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,3 +32,10 @@ version = {attr = "typesense.__version__"} [tool.setuptools.packages.find] where = ["src"] + +[tool.coverage.run] +source = ["."] +omit = 
["examples/*.py","./venv/*","tests/*/*.py","*__init__.py","*/*test.py", "./src/typesense/types/*.py"] + +[tool.coverage.report] +omit = ["examples/*.py","./venv/*","tests/*.py","*__init__.py","*/*test.py"] From df75f6868b8cc5fe9ef3408986b42d8f60ff4233 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 10 Sep 2024 17:04:53 +0300 Subject: [PATCH 195/288] chore: add py.typed file --- src/typesense/py.typed | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 src/typesense/py.typed diff --git a/src/typesense/py.typed b/src/typesense/py.typed new file mode 100644 index 0000000..e69de29 From 3311032afef9108ab11e6beb6f8bd2bf07bf6701 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 16 Sep 2024 16:57:12 +0300 Subject: [PATCH 196/288] feat: add additional headers to config and pass them through the request --- src/typesense/configuration.py | 4 ++++ src/typesense/request_handler.py | 6 +++++- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/src/typesense/configuration.py b/src/typesense/configuration.py index 6bff96f..96ea7e1 100644 --- a/src/typesense/configuration.py +++ b/src/typesense/configuration.py @@ -74,6 +74,8 @@ class ConfigDict(typing.TypedDict): master_node (typing.Union[str, NodeConfigDict], deprecated): A dictionary or URL that represents the master node. + additional_headers (dict): Additional headers to include in the request. + read_replica_nodes (list[typing.Union[str, NodeConfigDict]], deprecated): A list of dictionaries or URLs that represent the read replica nodes. 
""" @@ -87,6 +89,7 @@ class ConfigDict(typing.TypedDict): verify: typing.NotRequired[bool] timeout_seconds: typing.NotRequired[int] # deprecated master_node: typing.NotRequired[typing.Union[str, NodeConfigDict]] # deprecated + additional_headers: typing.NotRequired[typing.Dict[str, str]] read_replica_nodes: typing.NotRequired[ typing.List[typing.Union[str, NodeConfigDict]] ] # deprecated @@ -213,6 +216,7 @@ def __init__( 60, ) self.verify = config_dict.get("verify", True) + self.additional_headers = config_dict.get("additional_headers", {}) def _handle_nearest_node( self, diff --git a/src/typesense/request_handler.py b/src/typesense/request_handler.py index 3ef16ca..b9f822a 100644 --- a/src/typesense/request_handler.py +++ b/src/typesense/request_handler.py @@ -204,7 +204,11 @@ def make_request( Raises: TypesenseClientError: If the API returns an error response. """ - headers = {self.api_key_header_name: self.config.api_key} + headers = { + self.api_key_header_name: self.config.api_key, + } + headers.update(self.config.additional_headers) + kwargs.setdefault("headers", {}).update(headers) kwargs.setdefault("timeout", self.config.connection_timeout_seconds) kwargs.setdefault("verify", self.config.verify) From d790d41b5d239ab62f7e6bc42d175b4bd6417a9a Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 16 Sep 2024 16:57:38 +0300 Subject: [PATCH 197/288] test: add tests for additional headers --- tests/api_call_test.py | 42 ++++++++++++++++++++++++++++++++++++- tests/configuration_test.py | 2 ++ 2 files changed, 43 insertions(+), 1 deletion(-) diff --git a/tests/api_call_test.py b/tests/api_call_test.py index 3ae8b34..caaa4a1 100644 --- a/tests/api_call_test.py +++ b/tests/api_call_test.py @@ -6,6 +6,7 @@ import sys import time +from isort import Config from pytest_mock import MockFixture if sys.version_info >= (3, 11): @@ -135,6 +136,43 @@ def test_normalize_params_with_no_booleans() -> None: assert parameter_dict == {"key1": "value", "key2": 123} +def 
test_additional_headers(fake_api_call: ApiCall) -> None: + """Test the `make_request` method with additional headers from the config.""" + session = requests.sessions.Session() + api_call = ApiCall( + Configuration( + { + "additional_headers": { + "AdditionalHeader1": "test", + "AdditionalHeader2": "test2", + }, + "api_key": "test-api", + "nodes": [ + "http://nearest:8108", + ], + }, + ), + ) + + with requests_mock.mock(session=session) as request_mocker: + request_mocker.get( + "http://nearest:8108/test", + json={"key": "value"}, + status_code=200, + ) + + api_call._execute_request( + session.get, + "/test", + as_json=True, + entity_type=typing.Dict[str, str], + ) + + request = request_mocker.request_history[-1] + assert request.headers["AdditionalHeader1"] == "test" + assert request.headers["AdditionalHeader2"] == "test2" + + def test_make_request_as_json(fake_api_call: ApiCall) -> None: """Test the `make_request` method with JSON response.""" session = requests.sessions.Session() @@ -172,6 +210,7 @@ def test_make_request_as_text(fake_api_call: ApiCall) -> None: as_json=False, entity_type=typing.Dict[str, str], ) + assert response == "response text" @@ -431,7 +470,8 @@ def test_get_node_no_healthy_nodes( assert "No healthy nodes were found. Returning the next node." 
in caplog.text assert ( - selected_node == fake_api_call.node_manager.nodes[fake_api_call.node_manager.node_index] + selected_node + == fake_api_call.node_manager.nodes[fake_api_call.node_manager.node_index] ) assert fake_api_call.node_manager.node_index == 0 diff --git a/tests/configuration_test.py b/tests/configuration_test.py index 120888f..da3166f 100644 --- a/tests/configuration_test.py +++ b/tests/configuration_test.py @@ -66,6 +66,7 @@ def test_configuration_explicit() -> None: "num_retries": 5, "retry_interval_seconds": 2.0, "verify": False, + "additional_headers": {"X-Test": "test", "X-Test2": "test2"}, } configuration = Configuration(config) @@ -82,6 +83,7 @@ def test_configuration_explicit() -> None: "num_retries": 5, "retry_interval_seconds": 2.0, "verify": False, + "additional_headers": {"X-Test": "test", "X-Test2": "test2"}, } assert_to_contain_object(configuration, expected) From 71fc13554024b4afd70e43a891d3beaf4344f1cd Mon Sep 17 00:00:00 2001 From: Kishore Nallan Date: Mon, 16 Sep 2024 21:08:46 +0530 Subject: [PATCH 198/288] Bump version --- src/typesense/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/typesense/__init__.py b/src/typesense/__init__.py index 6e45291..9fb79c6 100644 --- a/src/typesense/__init__.py +++ b/src/typesense/__init__.py @@ -1,5 +1,5 @@ from .client import Client # NOQA -__version__ = '0.21.0' +__version__ = '1.0.0b1' From c7e33869215ceb3d7a2209f8e4d92ee307a3e439 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 15 Nov 2024 13:08:47 +0200 Subject: [PATCH 199/288] fix(api-call): remove unpack and use typeddict directly --- src/typesense/api_call.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index 47d0eec..ca4d303 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -334,7 +334,7 @@ def _execute_request( as_json: typing.Literal[True], last_exception: typing.Union[None, Exception] 
= None, num_retries: int = 0, - **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + **kwargs: SessionFunctionKwargs[TParams, TBody], ) -> TEntityDict: """ Execute a request to the Typesense API with retry logic. @@ -373,7 +373,7 @@ def _execute_request( as_json: typing.Literal[False], last_exception: typing.Union[None, Exception] = None, num_retries: int = 0, - **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + **kwargs: SessionFunctionKwargs[TParams, TBody], ) -> str: """ Execute a request to the Typesense API with retry logic. @@ -411,7 +411,7 @@ def _execute_request( as_json: typing.Union[typing.Literal[True], typing.Literal[False]] = True, last_exception: typing.Union[None, Exception] = None, num_retries: int = 0, - **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + **kwargs: SessionFunctionKwargs[TParams, TBody], ) -> typing.Union[TEntityDict, str]: """ Execute a request to the Typesense API with retry logic. @@ -493,7 +493,7 @@ def _make_request_and_process_response( def _prepare_request_params( self, endpoint: str, - **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + **kwargs: SessionFunctionKwargs[TParams, TBody], ) -> typing.Tuple[Node, str, SessionFunctionKwargs[TParams, TBody]]: node = self.node_manager.get_node() url = node.url() + endpoint From 9fc1a9179902e17b81845386475977e669e0f25d Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 15 Nov 2024 13:08:19 +0200 Subject: [PATCH 200/288] fix: update typesense version to 27.1 --- tests/debug_test.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/debug_test.py b/tests/debug_test.py index d491d17..efe8e92 100644 --- a/tests/debug_test.py +++ b/tests/debug_test.py @@ -30,7 +30,7 @@ def test_init(fake_api_call: ApiCall) -> None: def test_retrieve(fake_debug: Debug) -> None: """Test that the Debug object can retrieve a debug.""" - json_response: DebugResponseSchema = {"state": 1, "version": "27.0"} + 
json_response: DebugResponseSchema = {"state": 1, "version": "27.1"} with requests_mock.Mocker() as mock: mock.get( @@ -48,7 +48,7 @@ def test_retrieve(fake_debug: Debug) -> None: def test_actual_retrieve(actual_debug: Debug) -> None: """Test that the Debug object can retrieve a debug on Typesense server.""" - json_response: DebugResponseSchema = {"state": 1, "version": "27.0"} + json_response: DebugResponseSchema = {"state": 1, "version": "27.1"} response = actual_debug.retrieve() From 550405872afca842e64ead24841e22b33e76ce37 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 15 Nov 2024 13:09:04 +0200 Subject: [PATCH 201/288] ci: add test and lint workflow --- .github/workflows/test-and-lint.yml | 57 +++++++++++++++++++++++++++++ setup.cfg | 2 +- 2 files changed, 58 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/test-and-lint.yml diff --git a/.github/workflows/test-and-lint.yml b/.github/workflows/test-and-lint.yml new file mode 100644 index 0000000..fbca513 --- /dev/null +++ b/.github/workflows/test-and-lint.yml @@ -0,0 +1,57 @@ +name: Test and Lint + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +jobs: + quality: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + services: + typesense: + image: typesense/typesense:27.1 + ports: + - 8108:8108 + volumes: + - /tmp/typesense-data:/data + - /tmp/typesense-analytics:/analytics + env: + TYPESENSE_API_KEY: xyz + TYPESENSE_DATA_DIR: /data + TYPESENSE_ENABLE_CORS: true + TYPESENSE_ANALYTICS_DIR: /analytics + TYPESENSE_ENABLE_SEARCH_ANALYTICS: true + + steps: + - name: Wait for Typesense + run: | + timeout 20 bash -c 'while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' localhost:8108/health)" != "200" ]]; do sleep 1; done' || false + - uses: actions/checkout@v4 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + cache: 
'pip' + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements/dev.txt + + - name: Lint with Flake8 + run: | + flake8 src/typesense + + - name: Check types with mypy + run: | + mypy src/typesense + + - name: Run tests and coverage (excluding OpenAI) + run: | + coverage run -m pytest -m "not open_ai" diff --git a/setup.cfg b/setup.cfg index ccbbe25..b6d919d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -19,7 +19,7 @@ docstring-style = sphinx max-complexity = 6 # # Excluding some directories: -exclude = .git,__pycache__,venv,.eggs,*.egg +exclude = .git,__pycache__,venv,.eggs,*.egg,src/typesense/__init__.py ignore = Q000, WPS602, WPS432, WPS305, WPS221, WPS230, WPS234, WPS433, WPS440, W503, WPS331, WPS306, WPS237, WPS202, RST301, RST306, WPS214, WPS235, WPS226, WPS337, WPS320, F821, WPS201 per-file-ignores = tests/*.py: S101, WPS226, WPS118, WPS202, WPS204, WPS218, WPS211, WPS604, WPS431, WPS210, WPS201, WPS437 From 8e0e2d186008ea9f7eff63011a409f501c4c86c4 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 15 Nov 2024 13:12:26 +0200 Subject: [PATCH 202/288] fix: fix branch name on testing workflow --- .github/workflows/test-and-lint.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test-and-lint.yml b/.github/workflows/test-and-lint.yml index fbca513..45b96d7 100644 --- a/.github/workflows/test-and-lint.yml +++ b/.github/workflows/test-and-lint.yml @@ -2,9 +2,9 @@ name: Test and Lint on: push: - branches: [ main ] + branches: [ master ] pull_request: - branches: [ main ] + branches: [ master ] jobs: quality: From a00f6bcf2aa92d75f8e89bcecf9b805ab9fa45e7 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 15 Nov 2024 13:35:58 +0200 Subject: [PATCH 203/288] ci(release): add PyPI release workflow --- .github/workflows/release.yml | 61 +++++++++++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) create mode 100644 .github/workflows/release.yml diff --git 
a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..ce1f9bc --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,61 @@ +name: Publish Package to PyPI +on: + push: + tags: + - 'v*' +jobs: + tests: + name: Run Tests + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.8" + cache: 'pip' + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements/dev.txt + + - name: Run tests (excluding OpenAI) + run: coverage run -m pytest -m "not open_ai" + + - name: Show coverage report + run: coverage report + build-n-publish: + name: Build and Publish + needs: tests + runs-on: ubuntu-latest + environment: + name: pypi + url: https://pypi.org/p/typesense + permissions: + id-token: write + contents: read + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.8" + cache: pip + + - name: Install build dependencies + run: | + python -m pip install --upgrade pip + pip install build + - name: Build package + run: | + rm -rf dist/ + python -m build + - name: Publish to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + attestations: true From feed1e709a2f92374bd670fdf98eb6247bd32f78 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 15 Nov 2024 13:43:09 +0200 Subject: [PATCH 204/288] chore: remove publish shell script --- publish.sh | 4 ---- 1 file changed, 4 deletions(-) delete mode 100755 publish.sh diff --git a/publish.sh b/publish.sh deleted file mode 100755 index 979ffdf..0000000 --- a/publish.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/usr/bin/env bash -rm -rf dist/* -python3 -m build -twine upload dist/* From dc013f4c814774bfa5ecf9454aa3564798570c3d Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 4 Feb 2025 16:14:07 
+0200 Subject: [PATCH 205/288] feat(client): add stemming dictionary support - add new `stemming` module with dictionary management functionality - add `StemmingDictionaries` class for crud operations on dictionaries - add corresponding types and tests for stemming operations - update `Client` class to expose stemming functionality --- setup.cfg | 5 +- src/typesense/client.py | 5 +- src/typesense/stemming.py | 50 +++++++ src/typesense/stemming_dictionaries.py | 187 +++++++++++++++++++++++++ src/typesense/stemming_dictionary.py | 75 ++++++++++ src/typesense/types/stemming.py | 45 ++++++ tests/fixtures/stemming_fixtures.py | 14 ++ tests/stemming_test.py | 40 ++++++ 8 files changed, 418 insertions(+), 3 deletions(-) create mode 100644 src/typesense/stemming.py create mode 100644 src/typesense/stemming_dictionaries.py create mode 100644 src/typesense/stemming_dictionary.py create mode 100644 src/typesense/types/stemming.py create mode 100644 tests/fixtures/stemming_fixtures.py create mode 100644 tests/stemming_test.py diff --git a/setup.cfg b/setup.cfg index ccbbe25..c440de4 100644 --- a/setup.cfg +++ b/setup.cfg @@ -20,11 +20,12 @@ max-complexity = 6 # # Excluding some directories: exclude = .git,__pycache__,venv,.eggs,*.egg -ignore = Q000, WPS602, WPS432, WPS305, WPS221, WPS230, WPS234, WPS433, WPS440, W503, WPS331, WPS306, WPS237, WPS202, RST301, RST306, WPS214, WPS235, WPS226, WPS337, WPS320, F821, WPS201 +ignore = Q000, WPS602, WPS432, WPS305, WPS221, WPS230, WPS234, WPS433, WPS440, W503, WPS331, WPS306, WPS237, WPS202, RST301, RST306, WPS214, WPS235, WPS226, WPS337, WPS320, F821, WPS201, E704, D102 per-file-ignores = tests/*.py: S101, WPS226, WPS118, WPS202, WPS204, WPS218, WPS211, WPS604, WPS431, WPS210, WPS201, WPS437 src/typesense/types/*.py: B950, WPS215, WPS111, WPS462, WPS322, WPS428, WPS114, WPS110, WPS202 - src/typesense/documents.py: WPS320, E704, D102, WPS428, WPS220 + src/typesense/documents.py: WPS320, WPS428, WPS220 + 
src/typesense/stemming_dictionaries.py: WPS320, WPS428, WPS220 src/typesense/api_call.py: WPS110, WPS211 src/typesense/request_handler.py: WPS110, WPS211 diff --git a/src/typesense/client.py b/src/typesense/client.py index 557b45f..dc7d09b 100644 --- a/src/typesense/client.py +++ b/src/typesense/client.py @@ -45,6 +45,7 @@ from typesense.keys import Keys from typesense.multi_search import MultiSearch from typesense.operations import Operations +from typesense.stemming import Stemming from typesense.stopwords import Stopwords TDoc = typing.TypeVar("TDoc", bound=DocumentSchema) @@ -56,7 +57,7 @@ class Client: This class serves as the entry point for all Typesense operations. It initializes and provides access to various components of the Typesense SDK, such as collections, - multi-search, keys, aliases, analytics, operations, debug, stopwords, + multi-search, keys, aliases, analytics, stemming, operations, debug, stopwords, and conversation models. Attributes: @@ -67,6 +68,7 @@ class Client: keys (Keys): Instance for managing API keys. aliases (Aliases): Instance for managing collection aliases. analytics (Analytics): Instance for analytics operations. + stemming (Stemming): Instance for stemming dictionary operations. operations (Operations): Instance for various Typesense operations. debug (Debug): Instance for debug operations. stopwords (Stopwords): Instance for managing stopwords. @@ -96,6 +98,7 @@ def __init__(self, config_dict: ConfigDict) -> None: self.keys = Keys(self.api_call) self.aliases = Aliases(self.api_call) self.analytics = Analytics(self.api_call) + self.stemming = Stemming(self.api_call) self.operations = Operations(self.api_call) self.debug = Debug(self.api_call) self.stopwords = Stopwords(self.api_call) diff --git a/src/typesense/stemming.py b/src/typesense/stemming.py new file mode 100644 index 0000000..b6845da --- /dev/null +++ b/src/typesense/stemming.py @@ -0,0 +1,50 @@ +""" +Module for managing stemming dictionaries in Typesense. 
+ +This module provides a class for managing stemming dictionaries in Typesense, +including creating, updating, and retrieving them. + +Classes: + - Stemming: Handles operations related to stemming dictionaries. + +Attributes: + - StemmingDictionaries: The StemmingDictionaries object for managing stemming dictionaries. + +Methods: + - __init__: Initializes the Stemming object. + +The Stemming class interacts with the Typesense API to manage stemming dictionary operations. +It provides access to the StemmingDictionaries object for managing stemming dictionaries. + +For more information on stemming dictionaries, refer to the Stemming +[documentation](https://typesense.org/docs/28.0/api/stemming.html) + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + +from typesense.api_call import ApiCall +from typesense.stemming_dictionaries import StemmingDictionaries + + +class Stemming(object): + """ + Class for managing stemming dictionaries in Typesense. + + This class provides methods to interact with stemming dictionaries, including + creating, updating, and retrieving them. + + Attributes: + dictionaries (StemmingDictionaries): The StemmingDictionaries object for managing + stemming dictionaries. + """ + + def __init__(self, api_call: ApiCall): + """ + Initialize the Stemming object. + + Args: + api_call (ApiCall): The API call object for making requests. + """ + self.api_call = api_call + self.dictionaries = StemmingDictionaries(api_call) diff --git a/src/typesense/stemming_dictionaries.py b/src/typesense/stemming_dictionaries.py new file mode 100644 index 0000000..01471f1 --- /dev/null +++ b/src/typesense/stemming_dictionaries.py @@ -0,0 +1,187 @@ +""" +Module for interacting with the stemming dictionaries endpoint of the Typesense API. 
+ +This module provides a class for managing stemming dictionaries in Typesense, including creating +and updating them. + +Classes: + - StemmingDictionaries: Handles operations related to stemming dictionaries. + +Methods: + - __init__: Initializes the StemmingDictionaries object. + - __getitem__: Retrieves or creates a StemmingDictionary object for a given dictionary_id. + - upsert: Creates or updates a stemming dictionary. + - _upsert_list: Creates or updates a list of stemming dictionaries. + - _dump_to_jsonl: Dumps a list of StemmingDictionaryCreateSchema objects to a JSONL string. + - _parse_response: Parses the response from the upsert operation. + - _upsert_raw: Performs the raw upsert operation. + - _endpoint_path: Constructs the API endpoint path for this specific stemming dictionary. + +The StemmingDictionaries class interacts with the Typesense API to manage stemming dictionary +operations. +It provides methods to create, update, and retrieve stemming dictionaries, as well as +access individual StemmingDictionary objects. + +For more information on stemming dictionaries, +refer to the Stemming [documentation](https://typesense.org/docs/28.0/api/stemming.html) +""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +import json + +from typesense.api_call import ApiCall +from typesense.stemming_dictionary import StemmingDictionary +from typesense.types.stemming import ( + StemmingDictionariesRetrieveSchema, + StemmingDictionaryCreateSchema, +) + + +class StemmingDictionaries: + """ + Class for managing stemming dictionaries in Typesense. + + This class provides methods to interact with stemming dictionaries, including + creating, updating, and retrieving them. + + Attributes: + api_call (ApiCall): The API call object for making requests. + stemming_dictionaries (Dict[str, StemmingDictionary]): A dictionary of + StemmingDictionary objects. 
+ """ + + resource_path: typing.Final[str] = "/stemming/dictionaries" + + def __init__(self, api_call: ApiCall): + """ + Initialize the StemmingDictionaries object. + + Args: + api_call (ApiCall): The API call object for making requests. + """ + self.api_call = api_call + self.stemming_dictionaries: typing.Dict[str, StemmingDictionary] = {} + + def __getitem__(self, dictionary_id: str) -> StemmingDictionary: + """ + Get or create an StemmingDictionary object for a given rule_id. + + Args: + rule_id (str): The ID of the analytics rule. + + Returns: + StemmingDictionary: The StemmingDictionary object for the given ID. + """ + if not self.stemming_dictionaries.get(dictionary_id): + self.stemming_dictionaries[dictionary_id] = StemmingDictionary( + self.api_call, + dictionary_id, + ) + return self.stemming_dictionaries[dictionary_id] + + def retrieve(self) -> StemmingDictionariesRetrieveSchema: + """ + Retrieve the list of stemming dictionaries. + + Returns: + StemmingDictionariesRetrieveSchema: The list of stemming dictionaries. + """ + response: StemmingDictionariesRetrieveSchema = self.api_call.get( + self._endpoint_path(), + entity_type=StemmingDictionariesRetrieveSchema, + ) + return response + + @typing.overload + def upsert( + self, + dictionary_id: str, + word_root_combinations: typing.Union[str, bytes], + ) -> str: ... + + @typing.overload + def upsert( + self, + dictionary_id: str, + word_root_combinations: typing.List[StemmingDictionaryCreateSchema], + ) -> typing.List[StemmingDictionaryCreateSchema]: ... 
+ + def upsert( + self, + dictionary_id: str, + word_root_combinations: typing.Union[ + typing.List[StemmingDictionaryCreateSchema], + str, + bytes, + ], + ) -> typing.Union[str, typing.List[StemmingDictionaryCreateSchema]]: + if isinstance(word_root_combinations, (str, bytes)): + return self._upsert_raw(dictionary_id, word_root_combinations) + + return self._upsert_list(dictionary_id, word_root_combinations) + + def _upsert_list( + self, + dictionary_id: str, + word_root_combinations: typing.List[StemmingDictionaryCreateSchema], + ) -> typing.List[StemmingDictionaryCreateSchema]: + word_combos_in_jsonl = self._dump_to_jsonl(word_root_combinations) + response = self._upsert_raw(dictionary_id, word_combos_in_jsonl) + return self._parse_response(response) + + def _dump_to_jsonl( + self, + word_root_combinations: typing.List[StemmingDictionaryCreateSchema], + ) -> str: + word_root_strs = [json.dumps(combo) for combo in word_root_combinations] + + return "\n".join(word_root_strs) + + def _parse_response( + self, + response: str, + ) -> typing.List[StemmingDictionaryCreateSchema]: + object_list: typing.List[StemmingDictionaryCreateSchema] = [] + + for line in response.split("\n"): + try: + decoded = json.loads(line) + except json.JSONDecodeError: + raise ValueError(f"Failed to parse JSON from response: {line}") + object_list.append(decoded) + return object_list + + def _upsert_raw( + self, + dictionary_id: str, + word_root_combinations: typing.Union[bytes, str], + ) -> str: + response: str = self.api_call.post( + self._endpoint_path("import"), + body=word_root_combinations, + as_json=False, + entity_type=str, + params={"id": dictionary_id}, + ) + return response + + def _endpoint_path(self, action: typing.Union[str, None] = None) -> str: + """ + Construct the API endpoint path for this specific stemming dictionary. + + Args: + action (str, optional): The action to perform on the stemming dictionary. + Defaults to None. + + Returns: + str: The constructed endpoint path. 
+ """ + if action: + return f"{StemmingDictionaries.resource_path}/{action}" + return StemmingDictionaries.resource_path diff --git a/src/typesense/stemming_dictionary.py b/src/typesense/stemming_dictionary.py new file mode 100644 index 0000000..f528fc1 --- /dev/null +++ b/src/typesense/stemming_dictionary.py @@ -0,0 +1,75 @@ +""" +Module for managing individual stemming dictionaries in Typesense. + +This module provides a class for managing individual stemming dictionaries in Typesense, +including retrieving them. + +Classes: + - StemmingDictionary: Handles operations related to individual stemming dictionaries. + +Methods: + - __init__: Initializes the StemmingDictionary object. + - retrieve: Retrieves this specific stemming dictionary. + +The StemmingDictionary class interacts with the Typesense API to manage operations on a +specific stemming dictionary. It provides methods to retrieve the dictionary details. + +For more information on stemming dictionaries, refer to the Stemming +[documentation](https://typesense.org/docs/28.0/api/stemming.html) + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + +from typesense.api_call import ApiCall +from typesense.types.stemming import StemmingDictionarySchema + + +class StemmingDictionary: + """ + Class for managing individual stemming dictionaries in Typesense. + + This class provides methods to interact with a specific stemming dictionary, + including retrieving it. + + Attributes: + api_call (ApiCall): The API call object for making requests. + dict_id (str): The ID of the stemming dictionary. + """ + + def __init__(self, api_call: ApiCall, dict_id: str): + """ + Initialize the StemmingDictionary object. + + Args: + api_call (ApiCall): The API call object for making requests. + dict_id (str): The ID of the stemming dictionary. 
+ """ + self.api_call = api_call + self.dict_id = dict_id + + def retrieve(self) -> StemmingDictionarySchema: + """ + Retrieve this specific stemming dictionary. + + Returns: + StemmingDictionarySchema: The schema containing the stemming dictionary details. + """ + response: StemmingDictionarySchema = self.api_call.get( + self._endpoint_path, + entity_type=StemmingDictionarySchema, + as_json=True, + ) + return response + + @property + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific analytics rule. + + Returns: + str: The constructed endpoint path. + """ + from typesense.stemming_dictionaries import StemmingDictionaries + + return "/".join([StemmingDictionaries.resource_path, self.dict_id]) diff --git a/src/typesense/types/stemming.py b/src/typesense/types/stemming.py new file mode 100644 index 0000000..2cc3dd9 --- /dev/null +++ b/src/typesense/types/stemming.py @@ -0,0 +1,45 @@ +"""Stemming types for Typesense Python Client.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class StemmingDictionaryCreateSchema(typing.TypedDict): + """ + Schema for creating a [stemming dictionary](https://typesense.org/docs/28/api/stemming.html#creating-a-stemming-dictionary). + + Attributes: + name (str): The name of the stemming dictionary. + words (list[str]): The list of words in the stemming dictionary. + """ + + word: str + root: str + + +class StemmingDictionarySchema(typing.TypedDict): + """ + Schema for a stemming dictionary. + + Attributes: + id (str): The ID of the stemming dictionary. + words (list[StemmingDictionarySchema]): The list of words and their roots in the stemming dictionary. + """ + + id: str + words: typing.List[StemmingDictionaryCreateSchema] + + +class StemmingDictionariesRetrieveSchema(typing.TypedDict): + """ + Schema for retrieving stemming dictionaries. 
+
+    Attributes:
+        dictionaries (list[str]): The list of stemming dictionary names.
+    """
+
+    dictionaries: typing.List[str]
diff --git a/tests/fixtures/stemming_fixtures.py b/tests/fixtures/stemming_fixtures.py
new file mode 100644
index 0000000..be571ed
--- /dev/null
+++ b/tests/fixtures/stemming_fixtures.py
@@ -0,0 +1,14 @@
+"""Fixtures for the Stemming tests."""
+
+import pytest
+
+from typesense.api_call import ApiCall
+from typesense.stemming import Stemming
+
+
+@pytest.fixture(scope="function", name="actual_stemming")
+def actual_stemming_fixture(
+    actual_api_call: ApiCall,
+) -> Stemming:
+    """Return a Stemming object using a real API."""
+    return Stemming(actual_api_call)
diff --git a/tests/stemming_test.py b/tests/stemming_test.py
new file mode 100644
index 0000000..9c0a812
--- /dev/null
+++ b/tests/stemming_test.py
@@ -0,0 +1,40 @@
+"""Tests for stemming."""
+
+from typesense.stemming import Stemming
+
+
+def test_actual_upsert(
+    actual_stemming: Stemming,
+) -> None:
+    """Test that it can upsert a stemming dictionary to Typesense Server."""
+    response = actual_stemming.dictionaries.upsert(
+        "set_1",
+        [{"word": "running", "root": "run"}, {"word": "fishing", "root": "fish"}],
+    )
+
+    assert response == [
+        {"word": "running", "root": "run"},
+        {"word": "fishing", "root": "fish"},
+    ]
+
+
+def test_actual_retrieve_many(
+    actual_stemming: Stemming,
+) -> None:
+    """Test that it can retrieve all stemming dictionaries from Typesense Server."""
+    response = actual_stemming.dictionaries.retrieve()
+    assert response == {"dictionaries": ["set_1"]}
+
+
+def test_actual_retrieve(
+    actual_stemming: Stemming,
+) -> None:
+    """Test that it can retrieve a single stemming dictionary from Typesense Server."""
+    response = actual_stemming.dictionaries["set_1"].retrieve()
+    assert response == {
+        "id": "set_1",
+        "words": [
+            {"word": "running", "root": "run"},
+            {"word": "fishing", "root": "fish"},
+        ],
+    }
From 89c19be3e2c1a71451ffe5b4ecd23db55c0b50d9 Mon Sep 17
00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 4 Feb 2025 17:26:45 +0200 Subject: [PATCH 206/288] feat(types): add geopolygon type to collection field types --- src/typesense/types/collection.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/typesense/types/collection.py b/src/typesense/types/collection.py index b2366dc..39ea2c1 100644 --- a/src/typesense/types/collection.py +++ b/src/typesense/types/collection.py @@ -17,6 +17,7 @@ "float", "bool", "geopoint", + "geopolygon", "geopoint[]", "string[]", "int32[]", From 738f5a4e6e4c40e26f23d00e7932e0789656a3bb Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 4 Feb 2025 17:27:24 +0200 Subject: [PATCH 207/288] feat(collections): add collection truncation option --- src/typesense/types/document.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/typesense/types/document.py b/src/typesense/types/document.py index 2bebc85..701e75f 100644 --- a/src/typesense/types/document.py +++ b/src/typesense/types/document.py @@ -830,11 +830,13 @@ class DeleteQueryParameters(typing.TypedDict): Parameters for deleting documents. Attributes: + truncate (str): Truncate the collection, keeping just the schema. filter_by (str): Filter to apply to documents. batch_size (int): Batch size for deleting documents. ignore_not_found (bool): Ignore not found documents. 
""" + truncate: typing.NotRequired[bool] filter_by: str batch_size: typing.NotRequired[int] ignore_not_found: typing.NotRequired[bool] From edd41e796073eed82ec42fc3758551103701f08f Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 4 Feb 2025 17:27:44 +0200 Subject: [PATCH 208/288] feat(multi-search): add union parameter to multisearch --- src/typesense/types/multi_search.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/typesense/types/multi_search.py b/src/typesense/types/multi_search.py index 392f129..3619c0b 100644 --- a/src/typesense/types/multi_search.py +++ b/src/typesense/types/multi_search.py @@ -29,4 +29,5 @@ class MultiSearchRequestSchema(typing.TypedDict): searches (list[MultiSearchParameters]): The search parameters. """ + union: typing.NotRequired[typing.Literal[True]] searches: typing.List[MultiSearchParameters] From 19bc04dd49f2a72db0ae2c2a036104f846276047 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 4 Feb 2025 17:28:17 +0200 Subject: [PATCH 209/288] feat(operations): add the `schema_changes` operation option --- src/typesense/operations.py | 31 ++++++++++++++++++++++++++++--- src/typesense/types/operations.py | 15 +++++++++++++++ 2 files changed, 43 insertions(+), 3 deletions(-) diff --git a/src/typesense/operations.py b/src/typesense/operations.py index a2e7782..4332089 100644 --- a/src/typesense/operations.py +++ b/src/typesense/operations.py @@ -22,6 +22,7 @@ HealthCheckResponse, LogSlowRequestsTimeParams, OperationResponse, + SchemaChangesResponse, SnapshotParameters, ) @@ -48,8 +49,9 @@ class Operations: """ resource_path: typing.Final[str] = "/operations" - healht_path: typing.Final[str] = "/health" + health_path: typing.Final[str] = "/health" config_path: typing.Final[str] = "/config" + schema_changes: typing.Final[str] = "/schema_changes" def __init__(self, api_call: ApiCall): """ @@ -60,6 +62,23 @@ def __init__(self, api_call: ApiCall): """ self.api_call = api_call + @typing.overload + def perform( + self, 
+        operation_name: typing.Literal["schema_changes"],
+        query_params: None = None,
+    ) -> typing.List[SchemaChangesResponse]:
+        """
+        Perform a schema changes operation.
+
+        Args:
+            operation_name (Literal["schema_changes"]): The name of the operation.
+            query_params (None, optional): Query parameters (not used for the schema_changes operation).
+
+        Returns:
+            List[SchemaChangesResponse]: The list of in-progress schema change operations.
+        """
+
     @typing.overload
     def perform(
         self,
@@ -150,7 +169,13 @@ def perform(
     def perform(
         self,
         operation_name: typing.Union[
-            typing.Literal["snapshot, vote, db/compact, cache/clear"],
+            typing.Literal[
+                "snapshot",
+                "vote",
+                "db/compact",
+                "cache/clear",
+                "schema_changes",
+            ],
             str,
         ],
         query_params: typing.Union[
@@ -189,7 +214,7 @@ def is_healthy(self) -> bool:
             bool: True if the server is healthy, False otherwise.
         """
         call_resp = self.api_call.get(
-            Operations.healht_path,
+            Operations.health_path,
             as_json=True,
             entity_type=HealthCheckResponse,
         )
diff --git a/src/typesense/types/operations.py b/src/typesense/types/operations.py
index 566f517..e2a03a3 100644
--- a/src/typesense/types/operations.py
+++ b/src/typesense/types/operations.py
@@ -41,6 +41,21 @@ class HealthCheckResponse(typing.TypedDict):
     ok: bool
 
 
+class SchemaChangesResponse(typing.TypedDict):
+    """
+    Response schema for schema changes.
+
+    Attributes:
+        collection (str): The name of the collection.
+        validated_docs (int): The number of validated documents.
+        altered_docs (int): The number of altered documents.
+    """
+
+    collection: str
+    validated_docs: int
+    altered_docs: int
+
+
 class OperationResponse(typing.TypedDict):
     """
     Response schema for operations.
From 0325b7d86de06505659569b750efd874d524e37f Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 4 Feb 2025 17:30:34 +0200 Subject: [PATCH 210/288] feat(field): add `tokens_separators` and `symbols_to_index` to field-level --- src/typesense/types/collection.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/typesense/types/collection.py b/src/typesense/types/collection.py index 39ea2c1..e929fa6 100644 --- a/src/typesense/types/collection.py +++ b/src/typesense/types/collection.py @@ -47,6 +47,8 @@ class CollectionFieldSchema(typing.Generic[_TType], typing.TypedDict, total=Fals optional (bool): Whether the field is optional. infix (bool): Whether the field is an infix. stem (bool): Whether the field is a stem. + symbols_to_index (list[str]): The symbols to index + token_separators (list[str]): The token separators. locale (Locales): The locale of the field. sort (bool): Whether the field is sortable. store (bool): Whether the field is stored. @@ -65,6 +67,8 @@ class CollectionFieldSchema(typing.Generic[_TType], typing.TypedDict, total=Fals locale: typing.NotRequired[Locales] sort: typing.NotRequired[bool] store: typing.NotRequired[bool] + symbols_to_index: typing.NotRequired[typing.List[str]] + token_separators: typing.NotRequired[typing.List[str]] num_dim: typing.NotRequired[float] range_index: typing.NotRequired[bool] index: typing.NotRequired[bool] @@ -84,6 +88,8 @@ class RegularCollectionFieldSchema(CollectionFieldSchema[_FieldType]): stem (bool): Whether the field is a stem. locale (Locales): The locale of the field. sort (bool): Whether the field is sortable. + symbols_to_index (list[str]): The symbols to index + token_separators (list[str]): The token separators. store (bool): Whether the field is stored. num_dim (float): The number of dimensions. range_index (bool): Whether the field is a range index. 
@@ -102,6 +108,8 @@ class ReferenceCollectionFieldSchema(CollectionFieldSchema[_ReferenceFieldType]) facet (bool): Whether the field is a facet. optional (bool): Whether the field is optional. infix (bool): Whether the field is an infix. + symbols_to_index (list[str]): The symbols to index + token_separators (list[str]): The token separators. stem (bool): Whether the field is a stem. locale (Locales): The locale of the field. sort (bool): Whether the field is sortable. From 1bbc7e8f4a02cebd6c3fd85f25fa5a4c359b4eaa Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 4 Feb 2025 17:33:17 +0200 Subject: [PATCH 211/288] feat(multi-search): add `rerank_hybrid_searches` parameter to multisearch --- src/typesense/types/document.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/typesense/types/document.py b/src/typesense/types/document.py index 701e75f..529e08d 100644 --- a/src/typesense/types/document.py +++ b/src/typesense/types/document.py @@ -569,6 +569,7 @@ class MultiSearchParameters(SearchParameters): """ collection: str + rerank_hybrid_matches: typing.NotRequired[bool] class MultiSearchCommonParameters( From bf3fbcf09627e86eec29eb725d8e760070204736 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 4 Feb 2025 17:34:08 +0200 Subject: [PATCH 212/288] feat(search): add `max_filter_by_candidates` search param --- src/typesense/types/document.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/typesense/types/document.py b/src/typesense/types/document.py index 529e08d..2d2c5cf 100644 --- a/src/typesense/types/document.py +++ b/src/typesense/types/document.py @@ -368,6 +368,7 @@ class FilterParameters(typing.TypedDict): """ filter_by: typing.NotRequired[str] + max_filter_by_candidates: typing.NotRequired[int] enable_lazy_filter: typing.NotRequired[bool] From 26d41a32caf7bf4fe114794ee3b06730f5ab414b Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 19 Feb 2025 02:01:05 +0200 Subject: [PATCH 213/288] fix(api_call): use 
SessionFunctionKwargs directly instead of unpacking --- src/typesense/api_call.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/typesense/api_call.py b/src/typesense/api_call.py index 47d0eec..90e1929 100644 --- a/src/typesense/api_call.py +++ b/src/typesense/api_call.py @@ -334,7 +334,7 @@ def _execute_request( as_json: typing.Literal[True], last_exception: typing.Union[None, Exception] = None, num_retries: int = 0, - **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + **kwargs: SessionFunctionKwargs[TParams, TBody], ) -> TEntityDict: """ Execute a request to the Typesense API with retry logic. @@ -373,7 +373,7 @@ def _execute_request( as_json: typing.Literal[False], last_exception: typing.Union[None, Exception] = None, num_retries: int = 0, - **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + **kwargs: SessionFunctionKwargs[TParams, TBody], ) -> str: """ Execute a request to the Typesense API with retry logic. @@ -411,7 +411,7 @@ def _execute_request( as_json: typing.Union[typing.Literal[True], typing.Literal[False]] = True, last_exception: typing.Union[None, Exception] = None, num_retries: int = 0, - **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + **kwargs: SessionFunctionKwargs[TParams, TBody], ) -> typing.Union[TEntityDict, str]: """ Execute a request to the Typesense API with retry logic. 
@@ -473,7 +473,7 @@ def _make_request_and_process_response( url: str, entity_type: typing.Type[TEntityDict], as_json: bool, - **kwargs: typing.Any, + **kwargs: SessionFunctionKwargs[TParams, TBody], ) -> typing.Union[TEntityDict, str]: """Make the API request and process the response.""" request_response = self.request_handler.make_request( @@ -493,7 +493,7 @@ def _make_request_and_process_response( def _prepare_request_params( self, endpoint: str, - **kwargs: typing.Unpack[SessionFunctionKwargs[TParams, TBody]], + **kwargs: SessionFunctionKwargs[TParams, TBody], ) -> typing.Tuple[Node, str, SessionFunctionKwargs[TParams, TBody]]: node = self.node_manager.get_node() url = node.url() + endpoint From 9531a721d9ddea494a4a245b2e738230c90a0a60 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 19 Feb 2025 02:08:53 +0200 Subject: [PATCH 214/288] chore: ignore rules for docstrings on overloads --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index d623aa3..bb8169c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -24,7 +24,7 @@ ignore = Q000, WPS602, WPS432, WPS305, WPS221, WPS230, WPS234, WPS433, WPS440, W per-file-ignores = tests/*.py: S101, WPS226, WPS118, WPS202, WPS204, WPS218, WPS211, WPS604, WPS431, WPS210, WPS201, WPS437 src/typesense/types/*.py: B950, WPS215, WPS111, WPS462, WPS322, WPS428, WPS114, WPS110, WPS202 - src/typesense/documents.py: WPS320, WPS428, WPS220 + src/typesense/documents.py: WPS320, E704, D102, WPS428, WPS220 src/typesense/stemming_dictionaries.py: WPS320, WPS428, WPS220 src/typesense/api_call.py: WPS110, WPS211 src/typesense/request_handler.py: WPS110, WPS211 From 8e398d9ec53f5367ae857e4970c666bbcdb5039e Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 19 Feb 2025 02:10:28 +0200 Subject: [PATCH 215/288] chore: ignore rules for docstrings on overloads on stemming --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 
bb8169c..ecafea2 100644 --- a/setup.cfg +++ b/setup.cfg @@ -25,7 +25,7 @@ per-file-ignores = tests/*.py: S101, WPS226, WPS118, WPS202, WPS204, WPS218, WPS211, WPS604, WPS431, WPS210, WPS201, WPS437 src/typesense/types/*.py: B950, WPS215, WPS111, WPS462, WPS322, WPS428, WPS114, WPS110, WPS202 src/typesense/documents.py: WPS320, E704, D102, WPS428, WPS220 - src/typesense/stemming_dictionaries.py: WPS320, WPS428, WPS220 + src/typesense/stemming_dictionaries.py: WPS320, E704, D102, WPS428, WPS220 src/typesense/api_call.py: WPS110, WPS211 src/typesense/request_handler.py: WPS110, WPS211 From a113035bd91bfa86bf66cb56571943ece3909ee5 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Sun, 16 Feb 2025 12:30:23 +0200 Subject: [PATCH 216/288] test: change integration test for debug to avoid future failures - Check for existence of keys and assert their types, instead of checking for their values --- tests/debug_test.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/tests/debug_test.py b/tests/debug_test.py index efe8e92..37c593a 100644 --- a/tests/debug_test.py +++ b/tests/debug_test.py @@ -47,9 +47,11 @@ def test_retrieve(fake_debug: Debug) -> None: def test_actual_retrieve(actual_debug: Debug) -> None: - """Test that the Debug object can retrieve a debug on Typesense server.""" - json_response: DebugResponseSchema = {"state": 1, "version": "27.1"} - + """Test that the Debug object can retrieve a debug on Typesense server and verify response structure.""" response = actual_debug.retrieve() - assert response == json_response + assert "state" in response + assert "version" in response + + assert isinstance(response["state"], int) + assert isinstance(response["version"], str) From 8058450f08867f581b19f5da10a9bb66cd1b0c36 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 19 Feb 2025 15:02:03 +0200 Subject: [PATCH 217/288] ci: update typesense version to 28.0 --- .github/workflows/test-and-lint.yml | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/.github/workflows/test-and-lint.yml b/.github/workflows/test-and-lint.yml index 45b96d7..67ede67 100644 --- a/.github/workflows/test-and-lint.yml +++ b/.github/workflows/test-and-lint.yml @@ -14,7 +14,7 @@ jobs: python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] services: typesense: - image: typesense/typesense:27.1 + image: typesense/typesense:28.0 ports: - 8108:8108 volumes: From f3a1f528cc39d7b88baa86b04d84810ea943d02f Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 19 Feb 2025 15:16:42 +0200 Subject: [PATCH 218/288] fix(tests): update tests for v28 --- tests/collection_test.py | 2 ++ tests/collections_test.py | 4 ++++ tests/documents_test.py | 1 - 3 files changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/collection_test.py b/tests/collection_test.py index d292be3..33c7837 100644 --- a/tests/collection_test.py +++ b/tests/collection_test.py @@ -197,6 +197,7 @@ def test_actual_retrieve( "sort": False, "infix": False, "stem": False, + "stem_dictionary": "", "store": True, }, { @@ -209,6 +210,7 @@ def test_actual_retrieve( "sort": True, "infix": False, "stem": False, + "stem_dictionary": "", "store": True, }, ], diff --git a/tests/collections_test.py b/tests/collections_test.py index 7df3a60..82f19ef 100644 --- a/tests/collections_test.py +++ b/tests/collections_test.py @@ -199,6 +199,7 @@ def test_actual_create(actual_collections: Collections, delete_all: None) -> Non "sort": False, "infix": False, "stem": False, + "stem_dictionary": "", "store": True, }, { @@ -211,6 +212,7 @@ def test_actual_create(actual_collections: Collections, delete_all: None) -> Non "sort": False, "infix": False, "stem": False, + "stem_dictionary": "", "store": True, }, ], @@ -265,6 +267,7 @@ def test_actual_retrieve( "sort": False, "infix": False, "stem": False, + "stem_dictionary": "", "store": True, }, { @@ -277,6 +280,7 @@ def test_actual_retrieve( "sort": True, "infix": False, "stem": False, + "stem_dictionary": "", 
"store": True, }, ], diff --git a/tests/documents_test.py b/tests/documents_test.py index f0572e6..9926798 100644 --- a/tests/documents_test.py +++ b/tests/documents_test.py @@ -237,7 +237,6 @@ def test_import_fail( expected.append( { "code": 409, - "document": '{"company_name": "Wrong", "id": "0", "num_employees": 0}', "error": "A document with id 0 already exists.", "success": False, }, From fa51241bf1ccf14ce69e95e15c1943ae42fbaa53 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 28 Feb 2025 09:33:50 +0200 Subject: [PATCH 219/288] ci: remove tests from release workflow --- .github/workflows/release.yml | 24 +----------------------- 1 file changed, 1 insertion(+), 23 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index ce1f9bc..266d4b2 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -4,29 +4,7 @@ on: tags: - 'v*' jobs: - tests: - name: Run Tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: "3.8" - cache: 'pip' - - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements/dev.txt - - - name: Run tests (excluding OpenAI) - run: coverage run -m pytest -m "not open_ai" - - - name: Show coverage report - run: coverage report - build-n-publish: + build-n-publish: name: Build and Publish needs: tests runs-on: ubuntu-latest From 2c192afd56e80389121a84100fb92dccdcfb1340 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 28 Feb 2025 09:35:03 +0200 Subject: [PATCH 220/288] ci: fix whitespace --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 266d4b2..d2bc5a0 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -4,7 +4,7 @@ on: tags: - 'v*' jobs: - build-n-publish: + build-n-publish: name: 
Build and Publish needs: tests runs-on: ubuntu-latest From 9f865727c0f58e30399bc529663e314276aa7bcd Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 3 Mar 2025 13:08:10 +0200 Subject: [PATCH 221/288] feat(document): add `ignore_not_found` option to delete method - add `DeleteSingleDocumentParameters` type definition with `ignore_not_found` flag - update `Document.delete()` method to accept optional parameters - add tests for delete with `ignore_not_found` flag --- src/typesense/document.py | 12 ++++++++++-- src/typesense/types/document.py | 11 +++++++++++ tests/document_test.py | 27 +++++++++++++++++++++++++++ 3 files changed, 48 insertions(+), 2 deletions(-) diff --git a/src/typesense/document.py b/src/typesense/document.py index a707ee4..644602b 100644 --- a/src/typesense/document.py +++ b/src/typesense/document.py @@ -22,7 +22,11 @@ import sys from typesense.api_call import ApiCall -from typesense.types.document import DirtyValuesParameters, DocumentSchema +from typesense.types.document import ( + DeleteSingleDocumentParameters, + DirtyValuesParameters, + DocumentSchema, +) if sys.version_info >= (3, 11): import typing @@ -101,7 +105,10 @@ def update( ) return typing.cast(TDoc, response) - def delete(self) -> TDoc: + def delete( + self, + delete_parameters: typing.Union[DeleteSingleDocumentParameters, None] = None, + ) -> TDoc: """ Delete this specific document. 
@@ -111,6 +118,7 @@ def delete(self) -> TDoc: response: TDoc = self.api_call.delete( self._endpoint_path, entity_type=typing.Dict[str, str], + params=delete_parameters, ) return response diff --git a/src/typesense/types/document.py b/src/typesense/types/document.py index 2d2c5cf..102ab02 100644 --- a/src/typesense/types/document.py +++ b/src/typesense/types/document.py @@ -827,6 +827,17 @@ class SearchResponse(typing.Generic[TDoc], typing.TypedDict): conversation: typing.NotRequired[Conversation] +class DeleteSingleDocumentParameters(typing.TypedDict): + """ + Parameters for deleting a single document. + + Attributes: + ignore_not_found (bool): Ignore not found documents. + """ + + ignore_not_found: typing.NotRequired[bool] + + class DeleteQueryParameters(typing.TypedDict): """ Parameters for deleting documents. diff --git a/tests/document_test.py b/tests/document_test.py index 42e1cba..ac3042c 100644 --- a/tests/document_test.py +++ b/tests/document_test.py @@ -2,6 +2,7 @@ from __future__ import annotations +import pytest import requests_mock from tests.fixtures.document_fixtures import Companies @@ -13,6 +14,7 @@ from typesense.api_call import ApiCall from typesense.document import Document from typesense.documents import Documents +from typesense.exceptions import ObjectNotFound def test_init(fake_api_call: ApiCall) -> None: @@ -133,3 +135,28 @@ def test_actual_delete( "company_name": "Company", "num_employees": 10, } + + +def test_actual_delete_non_existent( + actual_documents: Documents, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the Document object can delete an document from Typesense Server.""" + with pytest.raises(ObjectNotFound): + actual_documents["1"].delete() + + +def test_actual_delete_non_existent_ignore_not_found( + actual_documents: Documents, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the Document object can delete an document from 
Typesense Server.""" + response = actual_documents["1"].delete( + delete_parameters={"ignore_not_found": True}, + ) + + assert response == {"id": "1"} From 58cb9b234e227ceaa28145ed641ddad4f337c90b Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 18 Mar 2025 13:48:51 +0200 Subject: [PATCH 222/288] chore: remove dependency on tests for build and publish pipeline --- .github/workflows/release.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index d2bc5a0..98c956e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -6,7 +6,6 @@ on: jobs: build-n-publish: name: Build and Publish - needs: tests runs-on: ubuntu-latest environment: name: pypi From 56d0edf7595f88326a8b410a300c9d3ce168a515 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 18 Mar 2025 14:16:47 +0200 Subject: [PATCH 223/288] chore: bump ver to 1.0.3 --- src/typesense/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/typesense/__init__.py b/src/typesense/__init__.py index 9fb79c6..1a29ca2 100644 --- a/src/typesense/__init__.py +++ b/src/typesense/__init__.py @@ -1,5 +1,5 @@ from .client import Client # NOQA -__version__ = '1.0.0b1' +__version__ = '1.0.3' From f5faaaabad76b2f77a46387551937529d3301694 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 19 Mar 2025 16:45:14 +0200 Subject: [PATCH 224/288] chore: bundle typing extensions by default --- Pipfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Pipfile b/Pipfile index 4c9511e..564e6a5 100644 --- a/Pipfile +++ b/Pipfile @@ -5,6 +5,7 @@ name = "pypi" [packages] requests = "*" +typing-extensions = {version = "*", markers = "python_version < '3.11'"} [dev-packages] mypy = "*" @@ -17,7 +18,6 @@ pytest-mock = "*" requests-mock = "*" python-dotenv = "*" types-requests = "*" -typing-extensions = {version = "*", markers = "python_version < '3.11'"} faker = "*" [requires] From 
3c24c9362de9bb65c1ae41b5e759a973b8388826 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 31 Mar 2025 11:15:45 +0300 Subject: [PATCH 225/288] fix: add connection timeout seconds to config dict --- src/typesense/configuration.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/typesense/configuration.py b/src/typesense/configuration.py index 96ea7e1..d59ac5e 100644 --- a/src/typesense/configuration.py +++ b/src/typesense/configuration.py @@ -78,6 +78,8 @@ class ConfigDict(typing.TypedDict): read_replica_nodes (list[typing.Union[str, NodeConfigDict]], deprecated): A list of dictionaries or URLs that represent the read replica nodes. + + connection_timeout_seconds (float): The connection timeout in seconds. """ nodes: typing.List[typing.Union[str, NodeConfigDict]] @@ -93,6 +95,7 @@ class ConfigDict(typing.TypedDict): read_replica_nodes: typing.NotRequired[ typing.List[typing.Union[str, NodeConfigDict]] ] # deprecated + connection_timeout_seconds: typing.NotRequired[float] class Node: From 5053b9d861b2cb60ad1627fa619744bf45a736cc Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 31 Mar 2025 11:46:42 +0300 Subject: [PATCH 226/288] fix: add base document write parameters to the import type alias --- src/typesense/types/document.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/typesense/types/document.py b/src/typesense/types/document.py index 102ab02..3f0f9a8 100644 --- a/src/typesense/types/document.py +++ b/src/typesense/types/document.py @@ -300,6 +300,7 @@ class DocumentImportParametersReturnDocAndId(DocumentWriteParameters): DocumentImportParameters: typing.TypeAlias = typing.Union[ + DocumentWriteParameters, DocumentImportParametersReturnId, DocumentImportParametersReturnDoc, DocumentImportParametersReturnDocAndId, From 348fb52db9da8337cee5965cb5cc710e12d70861 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 31 Mar 2025 18:17:05 +0300 Subject: [PATCH 227/288] feat(metrics): add metrics module types - Create 
`MetricsResponse` types to handle API responses - Add support for retrieving system metrics (CPU, memory, disk, network) - Add support for Typesense-specific memory metrics - Ensure compatibility with Python 3.11+ and earlier versions --- src/typesense/metrics.py | 97 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 97 insertions(+) create mode 100644 src/typesense/metrics.py diff --git a/src/typesense/metrics.py b/src/typesense/metrics.py new file mode 100644 index 0000000..a368cb2 --- /dev/null +++ b/src/typesense/metrics.py @@ -0,0 +1,97 @@ +""" +This module provides functionality for retrieving metrics from the Typesense API. + +It contains the Metrics class, which handles API operations for retrieving +system and Typesense metrics such as CPU, memory, disk, and network usage. + +Classes: + MetricsResponse: Type definition for metrics response. + Metrics: Manages retrieving metrics from the Typesense API. + +Dependencies: + - typesense.api_call: Provides the ApiCall class for making API requests. + +Note: This module uses conditional imports to support both Python 3.11+ and earlier versions. +""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from typesense.api_call import ApiCall + + +class MetricsResponseBase(typing.TypedDict): + """ + Response schema for metrics retrieval. + + This TypedDict includes system metrics like CPU, memory, disk, and network usage, + as well as Typesense-specific memory metrics. + + Attributes: + system_cpu_active_percentage (str): Overall CPU active percentage. + system_disk_total_bytes (str): Total disk space in bytes. + system_disk_used_bytes (str): Used disk space in bytes. + system_memory_total_bytes (str): Total system memory in bytes. + system_memory_used_bytes (str): Used system memory in bytes. + system_network_received_bytes (str): Total network bytes received. + system_network_sent_bytes (str): Total network bytes sent. 
+ typesense_memory_active_bytes (str): Active memory used by Typesense. + typesense_memory_allocated_bytes (str): Allocated memory for Typesense. + typesense_memory_fragmentation_ratio (str): Memory fragmentation ratio. + typesense_memory_mapped_bytes (str): Mapped memory in bytes. + typesense_memory_metadata_bytes (str): Memory used for metadata. + typesense_memory_resident_bytes (str): Resident memory in bytes. + typesense_memory_retained_bytes (str): Retained memory in bytes. + """ + + system_cpu_active_percentage: str + system_disk_total_bytes: str + system_disk_used_bytes: str + system_memory_total_bytes: str + system_memory_used_bytes: str + system_network_received_bytes: str + system_network_sent_bytes: str + typesense_memory_active_bytes: str + typesense_memory_allocated_bytes: str + typesense_memory_fragmentation_ratio: str + typesense_memory_mapped_bytes: str + typesense_memory_metadata_bytes: str + typesense_memory_resident_bytes: str + typesense_memory_retained_bytes: str + + +class MetricsResponse(MetricsResponseBase): + """Extended MetricsResponse with optional per-CPU core metrics.""" + + system_memory_total_swap_bytes: str + system_memory_used_swap_bytes: str + system_cpu1_active_percentage: typing.Optional[str] + system_cpu2_active_percentage: typing.Optional[str] + system_cpu3_active_percentage: typing.Optional[str] + system_cpu4_active_percentage: typing.Optional[str] + system_cpu5_active_percentage: typing.Optional[str] + system_cpu6_active_percentage: typing.Optional[str] + system_cpu7_active_percentage: typing.Optional[str] + system_cpu8_active_percentage: typing.Optional[str] + system_cpu9_active_percentage: typing.Optional[str] + system_cpu10_active_percentage: typing.Optional[str] + system_cpu11_active_percentage: typing.Optional[str] + system_cpu12_active_percentage: typing.Optional[str] + system_cpu13_active_percentage: typing.Optional[str] + system_cpu14_active_percentage: typing.Optional[str] + system_cpu15_active_percentage: 
typing.Optional[str] + system_cpu16_active_percentage: typing.Optional[str] + system_cpu17_active_percentage: typing.Optional[str] + system_cpu18_active_percentage: typing.Optional[str] + system_cpu19_active_percentage: typing.Optional[str] + system_cpu20_active_percentage: typing.Optional[str] + system_cpu21_active_percentage: typing.Optional[str] + system_cpu22_active_percentage: typing.Optional[str] + system_cpu23_active_percentage: typing.Optional[str] + system_cpu24_active_percentage: typing.Optional[str] + + From 3b8da2f03105591a9db8c3f02b03a06c693fb4bf Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 31 Mar 2025 18:17:36 +0300 Subject: [PATCH 228/288] feat(metrics): implement metrics retrieval class - Add `Metrics` class with API integration - Create method to retrieve system and Typesense metrics - Define endpoint path for metrics retrieval - Connect with existing `MetricsResponse` type --- src/typesense/metrics.py | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/src/typesense/metrics.py b/src/typesense/metrics.py index a368cb2..3b48899 100644 --- a/src/typesense/metrics.py +++ b/src/typesense/metrics.py @@ -95,3 +95,39 @@ class MetricsResponse(MetricsResponseBase): system_cpu24_active_percentage: typing.Optional[str] +class Metrics: + """ + Manages metrics retrieval from the Typesense API. + + This class provides methods to retrieve system and Typesense metrics + such as CPU, memory, disk, and network usage. + + Attributes: + resource_path (str): The base path for metrics endpoint. + api_call (ApiCall): The ApiCall instance for making API requests. + """ + + resource_path: typing.Final[str] = "/metrics.json" + + def __init__(self, api_call: ApiCall): + """ + Initialize the Metrics instance. + + Args: + api_call (ApiCall): The ApiCall instance for making API requests. + """ + self.api_call = api_call + + def retrieve(self) -> MetricsResponse: + """ + Retrieve metrics from the Typesense API. 
+ + Returns: + MetricsResponse: A dictionary containing system and Typesense metrics. + """ + response: MetricsResponse = self.api_call.get( + Metrics.resource_path, + as_json=True, + entity_type=MetricsResponse, + ) + return response From e4f3b2bcca9591160c92fb59010d4874cd988036 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 31 Mar 2025 18:17:50 +0300 Subject: [PATCH 229/288] feat(client): integrate metrics module with client - Import `Metrics` class in `client.py` - Add `metrics` attribute to `Client` class documentation - Initialize `metrics` instance in `Client` constructor - Update module documentation to reference metrics dependency --- src/typesense/client.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/typesense/client.py b/src/typesense/client.py index dc7d09b..cde957b 100644 --- a/src/typesense/client.py +++ b/src/typesense/client.py @@ -17,6 +17,7 @@ - typesense.conversations_models: Provides the ConversationsModels class. - typesense.debug: Provides the Debug class. - typesense.keys: Provides the Keys class. + - typesense.metrics: Provides the Metrics class. - typesense.multi_search: Provides the MultiSearch class. - typesense.operations: Provides the Operations class. - typesense.stopwords: Provides the Stopwords class. @@ -43,6 +44,7 @@ from typesense.conversations_models import ConversationsModels from typesense.debug import Debug from typesense.keys import Keys +from typesense.metrics import Metrics from typesense.multi_search import MultiSearch from typesense.operations import Operations from typesense.stemming import Stemming @@ -72,6 +74,7 @@ class Client: operations (Operations): Instance for various Typesense operations. debug (Debug): Instance for debug operations. stopwords (Stopwords): Instance for managing stopwords. + metrics (Metrics): Instance for retrieving system and Typesense metrics. conversations_models (ConversationsModels): Instance for managing conversation models. 
""" @@ -102,6 +105,7 @@ def __init__(self, config_dict: ConfigDict) -> None: self.operations = Operations(self.api_call) self.debug = Debug(self.api_call) self.stopwords = Stopwords(self.api_call) + self.metrics = Metrics(self.api_call) self.conversations_models = ConversationsModels(self.api_call) def typed_collection( From cc039f3915a31d5a0fc8bf00500180c6004a4c58 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 31 Mar 2025 18:18:02 +0300 Subject: [PATCH 230/288] test(metrics): add test fixtures for metrics module - Create `metrics_fixtures.py` with pytest fixture - Set up fixture to return a `Metrics` object with actual API call - Configure fixture with function scope for isolated testing --- tests/fixtures/metrics_fixtures.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 tests/fixtures/metrics_fixtures.py diff --git a/tests/fixtures/metrics_fixtures.py b/tests/fixtures/metrics_fixtures.py new file mode 100644 index 0000000..7da5bc2 --- /dev/null +++ b/tests/fixtures/metrics_fixtures.py @@ -0,0 +1,12 @@ +"""Fixtures for the Metrics class tests.""" + +import pytest + +from typesense.api_call import ApiCall +from typesense.metrics import Metrics + + +@pytest.fixture(scope="function", name="actual_metrics") +def actual_debug_fixture(actual_api_call: ApiCall) -> Metrics: + """Return a Debug object using a real API.""" + return Metrics(actual_api_call) From 50ac673299a0f15eeaf4e4d25a8fe8fce25b9cf2 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Mon, 31 Mar 2025 18:18:13 +0300 Subject: [PATCH 231/288] test(metrics): add integration tests for metrics retrieval - Create `metrics_test.py` to test the `Metrics` class - Implement test for the `retrieve()` method - Verify all expected system metrics fields are p --- tests/metrics_test.py | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 tests/metrics_test.py diff --git a/tests/metrics_test.py b/tests/metrics_test.py new file mode 100644 index 
0000000..1e1ea47 --- /dev/null +++ b/tests/metrics_test.py @@ -0,0 +1,26 @@ +"""Tests for the Debug class.""" + +from __future__ import annotations + +from typesense.metrics import Metrics + + +def test_actual_retrieve(actual_metrics: Metrics) -> None: + """Test that the Debug object can retrieve a debug on Typesense server and verify response structure.""" + response = actual_metrics.retrieve() + + assert "system_cpu_active_percentage" in response + assert "system_disk_total_bytes" in response + assert "system_disk_used_bytes" in response + assert "system_memory_total_bytes" in response + assert "system_memory_used_bytes" in response + assert "system_network_received_bytes" in response + assert "system_network_sent_bytes" in response + assert "typesense_memory_active_bytes" in response + assert "typesense_memory_allocated_bytes" in response + assert "typesense_memory_fragmentation_ratio" in response + + assert "typesense_memory_mapped_bytes" in response + assert "typesense_memory_metadata_bytes" in response + assert "typesense_memory_resident_bytes" in response + assert "typesense_memory_retained_bytes" in response \ No newline at end of file From d43d82068b18a09e46ce712c051d09e701c2b060 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 8 Apr 2025 16:29:46 +0300 Subject: [PATCH 232/288] feat(types): add hnsw parameters to field schema --- src/typesense/types/collection.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/src/typesense/types/collection.py b/src/typesense/types/collection.py index e929fa6..b27de93 100644 --- a/src/typesense/types/collection.py +++ b/src/typesense/types/collection.py @@ -70,10 +70,22 @@ class CollectionFieldSchema(typing.Generic[_TType], typing.TypedDict, total=Fals symbols_to_index: typing.NotRequired[typing.List[str]] token_separators: typing.NotRequired[typing.List[str]] num_dim: typing.NotRequired[float] + hnsw_params: typing.NotRequired[HNSWParamsSchema] range_index: typing.NotRequired[bool] index: 
typing.NotRequired[bool] vec_dist: typing.NotRequired[typing.Union[typing.Literal["cosine", "ip"], str]] +class HNSWParamsSchema(typing.TypedDict): + """ + The schema for the HNSW parameters in the CollectionFieldSchema. + + Attributes: + M (int): The number of bi-directional links created for every new element. + ef_construction (int): The size of the dynamic list for the nearest neighbors. + """ + + M: typing.NotRequired[int] + ef_construction: typing.NotRequired[int] class RegularCollectionFieldSchema(CollectionFieldSchema[_FieldType]): """ From aabd06d38dc3466a40973844542ad5bdec75d6c5 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 8 Apr 2025 17:18:09 +0300 Subject: [PATCH 233/288] chore: lint --- setup.cfg | 2 +- src/typesense/types/collection.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index ecafea2..088736f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -23,7 +23,7 @@ exclude = .git,__pycache__,venv,.eggs,*.egg,src/typesense/__init__.py ignore = Q000, WPS602, WPS432, WPS305, WPS221, WPS230, WPS234, WPS433, WPS440, W503, WPS331, WPS306, WPS237, WPS202, RST301, RST306, WPS214, WPS235, WPS226, WPS337, WPS320, F821, WPS201 per-file-ignores = tests/*.py: S101, WPS226, WPS118, WPS202, WPS204, WPS218, WPS211, WPS604, WPS431, WPS210, WPS201, WPS437 - src/typesense/types/*.py: B950, WPS215, WPS111, WPS462, WPS322, WPS428, WPS114, WPS110, WPS202 + src/typesense/types/*.py: B950, WPS215, WPS111, WPS462, WPS322, WPS428, WPS114, WPS110, WPS202, WPS115 src/typesense/documents.py: WPS320, E704, D102, WPS428, WPS220 src/typesense/stemming_dictionaries.py: WPS320, E704, D102, WPS428, WPS220 src/typesense/api_call.py: WPS110, WPS211 diff --git a/src/typesense/types/collection.py b/src/typesense/types/collection.py index b27de93..4419347 100644 --- a/src/typesense/types/collection.py +++ b/src/typesense/types/collection.py @@ -75,6 +75,7 @@ class CollectionFieldSchema(typing.Generic[_TType], typing.TypedDict, total=Fals 
index: typing.NotRequired[bool] vec_dist: typing.NotRequired[typing.Union[typing.Literal["cosine", "ip"], str]] + class HNSWParamsSchema(typing.TypedDict): """ The schema for the HNSW parameters in the CollectionFieldSchema. @@ -87,6 +88,7 @@ class HNSWParamsSchema(typing.TypedDict): M: typing.NotRequired[int] ef_construction: typing.NotRequired[int] + class RegularCollectionFieldSchema(CollectionFieldSchema[_FieldType]): """ The schema of a regular field in a collection. From 1df5fe0edcff46b8650dbd39d4a55156183616f0 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 9 Apr 2025 11:41:48 +0300 Subject: [PATCH 234/288] fix(types): move hnsw params class above ref --- src/typesense/types/collection.py | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/src/typesense/types/collection.py b/src/typesense/types/collection.py index 4419347..9e8a397 100644 --- a/src/typesense/types/collection.py +++ b/src/typesense/types/collection.py @@ -36,6 +36,19 @@ Locales = typing.Literal["ja", "zh", "ko", "th", "el", "ru", "rs", "uk", "be", ""] +class HNSWParamsSchema(typing.TypedDict): + """ + The schema for the HNSW parameters in the CollectionFieldSchema. + + Attributes: + M (int): The number of bi-directional links created for every new element. + ef_construction (int): The size of the dynamic list for the nearest neighbors. + """ + + M: typing.NotRequired[int] + ef_construction: typing.NotRequired[int] + + class CollectionFieldSchema(typing.Generic[_TType], typing.TypedDict, total=False): """ CollectionFieldSchema represents the schema of a field in a collection. @@ -76,19 +89,6 @@ class CollectionFieldSchema(typing.Generic[_TType], typing.TypedDict, total=Fals vec_dist: typing.NotRequired[typing.Union[typing.Literal["cosine", "ip"], str]] -class HNSWParamsSchema(typing.TypedDict): - """ - The schema for the HNSW parameters in the CollectionFieldSchema. 
- - Attributes: - M (int): The number of bi-directional links created for every new element. - ef_construction (int): The size of the dynamic list for the nearest neighbors. - """ - - M: typing.NotRequired[int] - ef_construction: typing.NotRequired[int] - - class RegularCollectionFieldSchema(CollectionFieldSchema[_FieldType]): """ The schema of a regular field in a collection. From 8cb59afe093b2b505232dce2ac77860b6debf256 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 23 Apr 2025 11:37:05 +0300 Subject: [PATCH 235/288] feat(types): add missing search parameters to document types - Add `validate_field_names` to `QueryParameters` - Add `enable_synonyms`, `filter_curated_hits`, `synonym_prefix` to `RankingAndSortingParameters` - Add `facet_strategy` with literal type options to `FacetingParameters` - Add `enable_typos_for_alpha_numerical_tokens` and `synonym_num_typos` to `TypoToleranceParameters` --- src/typesense/types/document.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/src/typesense/types/document.py b/src/typesense/types/document.py index 3f0f9a8..b868d12 100644 --- a/src/typesense/types/document.py +++ b/src/typesense/types/document.py @@ -347,6 +347,8 @@ class QueryParameters(typing.TypedDict): preset (str): Preset for search queries. vector_query (str): Vector query for search. voice_query (str): Voice query for search. + stopwords (str, list[str]): A comma separated list of words to be dropped from the search query while searching. + validate_field_names (bool): Controls whether Typesense should validate if the fields exist in the schema. 
""" prefix: typing.NotRequired[typing.Union[str, bool, typing.List[bool]]] @@ -357,6 +359,8 @@ class QueryParameters(typing.TypedDict): preset: typing.NotRequired[str] vector_query: typing.NotRequired[str] voice_query: typing.NotRequired[str] + stopwords: typing.NotRequired[typing.Union[str, typing.List[str]]] + validate_field_names: typing.NotRequired[bool] class FilterParameters(typing.TypedDict): @@ -393,6 +397,9 @@ class RankingAndSortingParameters(typing.TypedDict): enable_overrides (bool): Enable overrides. override_tags (str, list[str]): Tags to override. max_candidates (int): Maximum number of candidates to return. + enable_synonyms (bool): If you have some synonyms defined but want to disable all of them for a particular search query, set `enable_synonyms` to `false`. + filter_curated_hits (bool): Whether the `filter_by` condition of the search query should be applicable to curated results (override definitions, pinned hits, hidden hits, etc. + synonym_prefix (bool): Allow synonym resolution on word prefixes in the query. """ query_by_weights: typing.NotRequired[typing.Union[str, typing.List[int]]] @@ -406,6 +413,9 @@ class RankingAndSortingParameters(typing.TypedDict): enable_overrides: typing.NotRequired[bool] override_tags: typing.NotRequired[typing.Union[str, typing.List[str]]] max_candidates: typing.NotRequired[int] + enable_synonyms: typing.NotRequired[bool] + filter_curated_hits: typing.NotRequired[bool] + synonym_prefix: typing.NotRequired[bool] class PaginationParameters(typing.TypedDict): @@ -437,6 +447,7 @@ class FacetingParameters(typing.TypedDict): facet_return_parent (str): Return parent of facet. facet_sample_percent (int): Sample percentage of facet values to return. facet_sample_threshold (int): Sample threshold of facet values to return. + facet_strategy (str): Typesense supports two strategies for efficient faceting, and has some built-in heuristics to pick the right strategy for you. 
""" facet_by: typing.NotRequired[typing.Union[str, typing.List[str]]] @@ -446,6 +457,13 @@ class FacetingParameters(typing.TypedDict): facet_return_parent: typing.NotRequired[str] facet_sample_percent: typing.NotRequired[int] facet_sample_threshold: typing.NotRequired[int] + facet_strategy: typing.NotRequired[ + typing.Union[ + typing.Literal["exhaustive"], + typing.Literal["top_values"], + typing.Literal["automatic"], # default + ] + ] class GroupingParameters(typing.TypedDict): @@ -518,6 +536,10 @@ class TypoToleranceParameters(typing.TypedDict): - `left_to_right`: Drop tokens from left to right. - `both_sides:3`: Drop tokens from both sides with a threshold of 3. Afterwards, drops back to the default right to left. + + enable_typos_for_numerical_tokens (bool): Set this parameter to `false` to disable typos on numerical query tokens. + enable_typos_for_alpha_numerical_tokens (bool): Set this parameter to `false` to disable typos on alphanumerical query tokens. + synonym_num_typos (int): Allow synonym resolution on typo-corrected words in the query. 
""" num_typos: typing.NotRequired[int] @@ -530,6 +552,8 @@ class TypoToleranceParameters(typing.TypedDict): typing.Literal["right_to_left", "left_to_right", "both_sides:3"] ] enable_typos_for_numerical_tokens: typing.NotRequired[bool] + enable_typos_for_alpha_numerical_tokens: typing.NotRequired[bool] + synonym_num_typos: typing.NotRequired[int] class CachingParameters(typing.TypedDict): From 0c820a73247e2ec26559af062b7fb7f65cc0c41f Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 23 Apr 2025 11:44:59 +0300 Subject: [PATCH 236/288] fix(api): improve error handling for invalid JSON responses - enhance `_get_error_message` to handle JSON decode errors - add detailed error message including the invalid response text - add tests covering valid JSON, invalid JSON, and non-JSON responses --- src/typesense/request_handler.py | 7 +++++-- tests/api_call_test.py | 35 ++++++++++++++++++++++++++++++++ 2 files changed, 40 insertions(+), 2 deletions(-) diff --git a/src/typesense/request_handler.py b/src/typesense/request_handler.py index b9f822a..18ece90 100644 --- a/src/typesense/request_handler.py +++ b/src/typesense/request_handler.py @@ -260,8 +260,11 @@ def _get_error_message(response: requests.Response) -> str: """ content_type = response.headers.get("Content-Type", "") if content_type.startswith("application/json"): - err_message: str = response.json().get("message", "API error.") - return err_message + try: + err_message: str = response.json().get("message", "API error.") + return err_message + except requests.exceptions.JSONDecodeError: + return f"API error: Invalid JSON response: {response.text}" return "API error." 
@staticmethod diff --git a/tests/api_call_test.py b/tests/api_call_test.py index caaa4a1..1d5fa11 100644 --- a/tests/api_call_test.py +++ b/tests/api_call_test.py @@ -94,6 +94,41 @@ def test_get_exception() -> None: assert RequestHandler._get_exception(999) == exceptions.TypesenseClientError +def test_get_error_message_with_invalid_json() -> None: + """Test that it correctly handles invalid JSON in error responses.""" + response = requests.Response() + response.headers["Content-Type"] = "application/json" + response.status_code = 400 + # Set an invalid JSON string that would cause JSONDecodeError + response._content = b'{"message": "Error occurred", "details": {"key": "value"' + + error_message = RequestHandler._get_error_message(response) + assert "API error: Invalid JSON response:" in error_message + assert '{"message": "Error occurred", "details": {"key": "value"' in error_message + + +def test_get_error_message_with_valid_json() -> None: + """Test that it correctly extracts error message from valid JSON responses.""" + response = requests.Response() + response.headers["Content-Type"] = "application/json" + response.status_code = 400 + response._content = b'{"message": "Error occurred", "details": {"key": "value"}}' + + error_message = RequestHandler._get_error_message(response) + assert error_message == "Error occurred" + + +def test_get_error_message_with_non_json_content_type() -> None: + """Test that it returns a default error message for non-JSON content types.""" + response = requests.Response() + response.headers["Content-Type"] = "text/plain" + response.status_code = 400 + response._content = b'Not a JSON content' + + error_message = RequestHandler._get_error_message(response) + assert error_message == "API error." 
+ + def test_normalize_params_with_booleans() -> None: """Test that it correctly normalizes boolean values to strings.""" parameter_dict: typing.Dict[str, str | bool] = {"key1": True, "key2": False} From 5b32db0d2ff24b4e21c49c99ed8568f7367a4ad1 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 23 Apr 2025 11:50:29 +0300 Subject: [PATCH 237/288] refactor(api): fix linting issue with try block --- src/typesense/request_handler.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/typesense/request_handler.py b/src/typesense/request_handler.py index 18ece90..e726379 100644 --- a/src/typesense/request_handler.py +++ b/src/typesense/request_handler.py @@ -261,8 +261,7 @@ def _get_error_message(response: requests.Response) -> str: content_type = response.headers.get("Content-Type", "") if content_type.startswith("application/json"): try: - err_message: str = response.json().get("message", "API error.") - return err_message + return typing.cast(str, response.json().get("message", "API error.")) except requests.exceptions.JSONDecodeError: return f"API error: Invalid JSON response: {response.text}" return "API error." 
From 47285fc63b0be1f23afbfdf760f2c4cd8eaaac84 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 23 Apr 2025 13:37:38 +0300 Subject: [PATCH 238/288] feat(collections): add `__contains__` method for collection existence - implement `__contains__` method to check if collection exists - enable use of `in` operator with collection names - handle exceptions gracefully when unable to retrieve collections --- src/typesense/collections.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/src/typesense/collections.py b/src/typesense/collections.py index 723b2de..65489ba 100644 --- a/src/typesense/collections.py +++ b/src/typesense/collections.py @@ -57,6 +57,22 @@ def __init__(self, api_call: ApiCall): self.api_call = api_call self.collections: typing.Dict[str, Collection[TDoc]] = {} + def __contains__(self, collection_name: str) -> bool: + """ + Check if a collection exists in Typesense. + + Args: + collection_name (str): The name of the collection to check. + + Returns: + bool: True if the collection exists, False otherwise. + """ + try: + all_collections = self.retrieve() + except Exception: + return False + return any(coll["name"] == collection_name for coll in all_collections) + def __getitem__(self, collection_name: str) -> Collection[TDoc]: """ Get or create a Collection instance for a given collection name. 
From b02f552cf3ed6158092a3514bde3e4bfad118bed Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 23 Apr 2025 14:13:10 +0300 Subject: [PATCH 239/288] fix(collections): correct logic in `exists()` method - fix existing condition in `Collections.exists()` method - add tests to verify collection existence checking - ensure proper return when collection is in cache --- src/typesense/collections.py | 15 +++++++++++++-- tests/collections_test.py | 13 +++++++++++++ 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/src/typesense/collections.py b/src/typesense/collections.py index 65489ba..89d1fbb 100644 --- a/src/typesense/collections.py +++ b/src/typesense/collections.py @@ -61,17 +61,28 @@ def __contains__(self, collection_name: str) -> bool: """ Check if a collection exists in Typesense. + This method tries to retrieve the specified collection to check for its existence, + utilizing the Collection.retrieve() method but without caching non-existent collections. + Args: collection_name (str): The name of the collection to check. Returns: bool: True if the collection exists, False otherwise. 
""" + if collection_name in self.collections: + try: + self.collections[collection_name].retrieve() + return True + except Exception: + self.collections.pop(collection_name, None) + return False + try: - all_collections = self.retrieve() + Collection(self.api_call, collection_name).retrieve() + return True except Exception: return False - return any(coll["name"] == collection_name for coll in all_collections) def __getitem__(self, collection_name: str) -> Collection[TDoc]: """ diff --git a/tests/collections_test.py b/tests/collections_test.py index 82f19ef..189a3f4 100644 --- a/tests/collections_test.py +++ b/tests/collections_test.py @@ -293,3 +293,16 @@ def test_actual_retrieve( response[0].pop("created_at") assert response == expected + + +def test_actual_contains( + actual_collections: Collections, + delete_all: None, + create_collection: None, +) -> None: + """Test that the Collections object can check if a collection exists in Typesense.""" + # Test for existing collection + assert "companies" in actual_collections + + # Test for non-existing collection + assert "non_existent_collection" not in actual_collections \ No newline at end of file From 3cb33f641a92cc7eba837561c98cfd923b5e2ecf Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Thu, 24 Apr 2025 11:53:04 +0300 Subject: [PATCH 240/288] test(collection): add another non-existent test --- tests/collections_test.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/collections_test.py b/tests/collections_test.py index 189a3f4..84971bd 100644 --- a/tests/collections_test.py +++ b/tests/collections_test.py @@ -305,4 +305,6 @@ def test_actual_contains( assert "companies" in actual_collections # Test for non-existing collection - assert "non_existent_collection" not in actual_collections \ No newline at end of file + assert "non_existent_collection" not in actual_collections + # Test again + assert "non_existent_collection" not in actual_collections From 
f59ac3765dba9f11b8b9800ab2b7c5165a18f85f Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Thu, 24 Apr 2025 12:09:29 +0300 Subject: [PATCH 241/288] chore: lint --- src/typesense/collections.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/typesense/collections.py b/src/typesense/collections.py index 89d1fbb..72fa381 100644 --- a/src/typesense/collections.py +++ b/src/typesense/collections.py @@ -71,14 +71,15 @@ def __contains__(self, collection_name: str) -> bool: bool: True if the collection exists, False otherwise. """ if collection_name in self.collections: - try: - self.collections[collection_name].retrieve() + try: # noqa: WPS229, WPS529 + + self.collections[collection_name].retrieve() # noqa: WPS529 return True except Exception: self.collections.pop(collection_name, None) return False - - try: + + try: # noqa: WPS229, WPS529 Collection(self.api_call, collection_name).retrieve() return True except Exception: From 52dacbd0dc3be5012dc865a415f820a8b0552a43 Mon Sep 17 00:00:00 2001 From: Elias <38086802+HandcartCactus@users.noreply.github.com> Date: Sun, 27 Apr 2025 00:45:22 -0400 Subject: [PATCH 242/288] bugfix: remove infinite loop from `index_and_search.py` +pagination --- examples/index_and_search.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/examples/index_and_search.py b/examples/index_and_search.py index e1422cf..219c0a0 100644 --- a/examples/index_and_search.py +++ b/examples/index_and_search.py @@ -67,6 +67,9 @@ res = client.collections['books'].documents.search({ 'q': 'the', 'query_by': 'title', - 'sort_by': 'ratings_count:desc' + 'sort_by': 'ratings_count:desc', + 'page': i, + 'per_page': 10, }) print(res['found']) + i += 1 From b7323bedf420d4780146e9679d22d5cda1e2ea68 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Thu, 15 May 2025 10:04:38 +0300 Subject: [PATCH 243/288] feat(document): add parameter support to document retrieval - add `params` parameter to `Document.retrieve()` 
method - create new `RetrieveParameters` typed dictionary - support including/excluding fields when retrieving documents --- src/typesense/document.py | 4 +++- src/typesense/types/document.py | 13 +++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/src/typesense/document.py b/src/typesense/document.py index 644602b..907d1c7 100644 --- a/src/typesense/document.py +++ b/src/typesense/document.py @@ -26,6 +26,7 @@ DeleteSingleDocumentParameters, DirtyValuesParameters, DocumentSchema, + RetrieveParameters, ) if sys.version_info >= (3, 11): @@ -67,7 +68,7 @@ def __init__( self.collection_name = collection_name self.document_id = document_id - def retrieve(self) -> TDoc: + def retrieve(self, params: RetrieveParameters) -> TDoc: """ Retrieve this specific document. @@ -78,6 +79,7 @@ def retrieve(self) -> TDoc: endpoint=self._endpoint_path, entity_type=typing.Dict[str, str], as_json=True, + params=params, ) return response diff --git a/src/typesense/types/document.py b/src/typesense/types/document.py index b868d12..416ca7e 100644 --- a/src/typesense/types/document.py +++ b/src/typesense/types/document.py @@ -889,3 +889,16 @@ class DeleteResponse(typing.TypedDict): """ num_deleted: int + + +class RetrieveParameters(typing.TypedDict): + """ + Parameters for retrieving documents. + + Attributes: + include_fields (str): Fields to include in the retrieved documents. + exclude_fields (str): Fields to exclude from the retrieved documents. 
+ """ + + include_fields: typing.NotRequired[typing.Union[str, typing.List[str]]] + exclude_fields: typing.NotRequired[typing.Union[str, typing.List[str]]] From 3ba2ef22ec30bf3c9c4c8887a7dd6f0e0079b031 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Thu, 15 May 2025 10:10:40 +0300 Subject: [PATCH 244/288] chore: lint --- src/typesense/document.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/typesense/document.py b/src/typesense/document.py index 907d1c7..fd5de16 100644 --- a/src/typesense/document.py +++ b/src/typesense/document.py @@ -68,7 +68,10 @@ def __init__( self.collection_name = collection_name self.document_id = document_id - def retrieve(self, params: RetrieveParameters) -> TDoc: + def retrieve( + self, + retrieve_parameters: typing.Union[RetrieveParameters, None] = None, + ) -> TDoc: """ Retrieve this specific document. @@ -79,7 +82,7 @@ def retrieve(self, params: RetrieveParameters) -> TDoc: endpoint=self._endpoint_path, entity_type=typing.Dict[str, str], as_json=True, - params=params, + params=retrieve_parameters, ) return response From b0d477bc995558f77069d2be12c4460744bcb5b0 Mon Sep 17 00:00:00 2001 From: Kishore Nallan Date: Tue, 20 May 2025 22:12:35 +0400 Subject: [PATCH 245/288] Bump version --- src/typesense/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/typesense/__init__.py b/src/typesense/__init__.py index 1a29ca2..c83493e 100644 --- a/src/typesense/__init__.py +++ b/src/typesense/__init__.py @@ -1,5 +1,5 @@ from .client import Client # NOQA -__version__ = '1.0.3' +__version__ = '1.1.1' From 16707f773a6515b7705d42e2ff4ae242588a828f Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 27 May 2025 16:16:17 +0300 Subject: [PATCH 246/288] build: transition dependency management to uv --- .python-version | 1 - Pipfile | 24 - Pipfile.lock | 905 -------------------------------- pyproject.toml | 43 +- requirements-dev.txt | 74 +++ requirements.txt | 14 +- 
requirements/common.txt | 6 - requirements/dev.txt | 64 --- src/typesense/__init__.py | 3 +- uv.lock | 1041 +++++++++++++++++++++++++++++++++++++ 10 files changed, 1161 insertions(+), 1014 deletions(-) delete mode 100644 .python-version delete mode 100644 Pipfile delete mode 100644 Pipfile.lock create mode 100644 requirements-dev.txt delete mode 100644 requirements/common.txt delete mode 100644 requirements/dev.txt create mode 100644 uv.lock diff --git a/.python-version b/.python-version deleted file mode 100644 index cc1923a..0000000 --- a/.python-version +++ /dev/null @@ -1 +0,0 @@ -3.8 diff --git a/Pipfile b/Pipfile deleted file mode 100644 index 564e6a5..0000000 --- a/Pipfile +++ /dev/null @@ -1,24 +0,0 @@ -[[source]] -url = "https://pypi.org/simple" -verify_ssl = true -name = "pypi" - -[packages] -requests = "*" -typing-extensions = {version = "*", markers = "python_version < '3.11'"} - -[dev-packages] -mypy = "*" -wemake-python-styleguide = "*" -flake8 = "*" -black = "*" -pytest = "*" -coverage = "*" -pytest-mock = "*" -requests-mock = "*" -python-dotenv = "*" -types-requests = "*" -faker = "*" - -[requires] -python_version = "3.8" diff --git a/Pipfile.lock b/Pipfile.lock deleted file mode 100644 index 73fd24d..0000000 --- a/Pipfile.lock +++ /dev/null @@ -1,905 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "cb14364b2d2eeadbe1841052b4943674e278b784f994853d1a394128c4a5cbac" - }, - "pipfile-spec": 6, - "requires": { - "python_version": "3.8" - }, - "sources": [ - { - "name": "pypi", - "url": "https://pypi.org/simple", - "verify_ssl": true - } - ] - }, - "default": { - "certifi": { - "hashes": [ - "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", - "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9" - ], - "markers": "python_version >= '3.6'", - "version": "==2024.8.30" - }, - "charset-normalizer": { - "hashes": [ - 
"sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027", - "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087", - "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786", - "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8", - "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09", - "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185", - "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574", - "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e", - "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519", - "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898", - "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269", - "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3", - "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f", - "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6", - "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8", - "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a", - "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73", - "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc", - "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714", - "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2", - "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc", - "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce", - "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d", - "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e", - "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6", - "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269", 
- "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96", - "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d", - "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a", - "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4", - "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77", - "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d", - "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0", - "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed", - "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068", - "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac", - "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25", - "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8", - "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab", - "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26", - "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2", - "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db", - "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f", - "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5", - "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99", - "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c", - "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d", - "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811", - "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa", - "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a", - "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03", - 
"sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b", - "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04", - "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c", - "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001", - "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458", - "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389", - "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99", - "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985", - "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537", - "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238", - "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f", - "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d", - "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796", - "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a", - "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143", - "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8", - "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c", - "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5", - "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5", - "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711", - "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4", - "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6", - "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c", - "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7", - "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4", - "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b", 
- "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae", - "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12", - "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c", - "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae", - "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8", - "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887", - "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b", - "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4", - "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f", - "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5", - "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33", - "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519", - "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561" - ], - "markers": "python_full_version >= '3.7.0'", - "version": "==3.3.2" - }, - "idna": { - "hashes": [ - "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac", - "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603" - ], - "markers": "python_version >= '3.6'", - "version": "==3.8" - }, - "requests": { - "hashes": [ - "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", - "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6" - ], - "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==2.32.3" - }, - "urllib3": { - "hashes": [ - "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472", - "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168" - ], - "markers": "python_version >= '3.8'", - "version": "==2.2.2" - } - }, - "develop": { - "astor": { - "hashes": [ - "sha256:070a54e890cefb5b3739d19f30f5a5ec840ffc9c50ffa7d23cc9fc1a38ebbfc5", - 
"sha256:6a6effda93f4e1ce9f618779b2dd1d9d84f1e32812c23a29b3fff6fd7f63fa5e" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==0.8.1" - }, - "attrs": { - "hashes": [ - "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346", - "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2" - ], - "markers": "python_version >= '3.7'", - "version": "==24.2.0" - }, - "bandit": { - "hashes": [ - "sha256:52077cb339000f337fb25f7e045995c4ad01511e716e5daac37014b9752de8ec", - "sha256:7c395a436743018f7be0a4cbb0a4ea9b902b6d87264ddecf8cfdc73b4f78ff61" - ], - "markers": "python_version >= '3.8'", - "version": "==1.7.9" - }, - "black": { - "hashes": [ - "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6", - "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e", - "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f", - "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018", - "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e", - "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd", - "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4", - "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed", - "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2", - "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42", - "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af", - "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb", - "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368", - "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb", - "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af", - "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed", - 
"sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47", - "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2", - "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a", - "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c", - "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920", - "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1" - ], - "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==24.8.0" - }, - "certifi": { - "hashes": [ - "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", - "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9" - ], - "markers": "python_version >= '3.6'", - "version": "==2024.8.30" - }, - "charset-normalizer": { - "hashes": [ - "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027", - "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087", - "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786", - "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8", - "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09", - "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185", - "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574", - "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e", - "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519", - "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898", - "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269", - "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3", - "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f", - "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6", - 
"sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8", - "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a", - "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73", - "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc", - "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714", - "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2", - "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc", - "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce", - "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d", - "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e", - "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6", - "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269", - "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96", - "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d", - "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a", - "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4", - "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77", - "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d", - "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0", - "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed", - "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068", - "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac", - "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25", - "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8", - "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab", - "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26", 
- "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2", - "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db", - "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f", - "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5", - "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99", - "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c", - "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d", - "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811", - "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa", - "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a", - "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03", - "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b", - "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04", - "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c", - "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001", - "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458", - "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389", - "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99", - "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985", - "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537", - "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238", - "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f", - "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d", - "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796", - "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a", - 
"sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143", - "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8", - "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c", - "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5", - "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5", - "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711", - "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4", - "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6", - "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c", - "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7", - "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4", - "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b", - "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae", - "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12", - "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c", - "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae", - "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8", - "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887", - "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b", - "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4", - "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f", - "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5", - "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33", - "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519", - "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561" - ], - "markers": "python_full_version >= '3.7.0'", - "version": "==3.3.2" - 
}, - "click": { - "hashes": [ - "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", - "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de" - ], - "markers": "python_version >= '3.7'", - "version": "==8.1.7" - }, - "coverage": { - "hashes": [ - "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca", - "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d", - "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6", - "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989", - "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c", - "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b", - "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223", - "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f", - "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56", - "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3", - "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8", - "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb", - "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388", - "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0", - "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a", - "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8", - "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f", - "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a", - "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962", - "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8", - "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391", - "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc", - 
"sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2", - "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155", - "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb", - "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0", - "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c", - "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a", - "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004", - "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060", - "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232", - "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93", - "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129", - "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163", - "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de", - "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6", - "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23", - "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569", - "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d", - "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778", - "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d", - "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36", - "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a", - "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6", - "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34", - "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704", - "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106", - "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9", 
- "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862", - "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b", - "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255", - "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16", - "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3", - "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133", - "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb", - "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657", - "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d", - "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca", - "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36", - "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c", - "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e", - "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff", - "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7", - "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5", - "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02", - "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c", - "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df", - "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3", - "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a", - "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959", - "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234", - "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc" - ], - "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==7.6.1" - }, - "darglint": { - "hashes": [ - 
"sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da", - "sha256:5ae11c259c17b0701618a20c3da343a3eb98b3bc4b5a83d31cdd94f5ebdced8d" - ], - "markers": "python_version >= '3.6' and python_version < '4.0'", - "version": "==1.8.1" - }, - "docutils": { - "hashes": [ - "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6", - "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b" - ], - "markers": "python_version >= '3.7'", - "version": "==0.20.1" - }, - "eradicate": { - "hashes": [ - "sha256:06df115be3b87d0fc1c483db22a2ebb12bcf40585722810d809cc770f5031c37", - "sha256:2b29b3dd27171f209e4ddd8204b70c02f0682ae95eecb353f10e8d72b149c63e" - ], - "version": "==2.3.0" - }, - "exceptiongroup": { - "hashes": [ - "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", - "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc" - ], - "markers": "python_version < '3.11'", - "version": "==1.2.2" - }, - "faker": { - "hashes": [ - "sha256:0d3c0399204aaf8205cc1750db443474ca0436f177126b2c27b798e8336cc74f", - "sha256:6a3a08be54c37e05f7943d7ba5211d252c1de737687a46ad6f29209d8d5db11f" - ], - "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==28.0.0" - }, - "flake8": { - "hashes": [ - "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38", - "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213" - ], - "index": "pypi", - "markers": "python_full_version >= '3.8.1'", - "version": "==7.1.1" - }, - "flake8-bandit": { - "hashes": [ - "sha256:068e09287189cbfd7f986e92605adea2067630b75380c6b5733dab7d87f9a84e", - "sha256:4c8a53eb48f23d4ef1e59293657181a3c989d0077c9952717e98a0eace43e06d" - ], - "markers": "python_version >= '3.6'", - "version": "==4.1.1" - }, - "flake8-broken-line": { - "hashes": [ - "sha256:96c964336024a5030dc536a9f6fb02aa679e2d2a6b35b80a558b5136c35832a9", - "sha256:e2c6a17f8d9a129e99c1320fce89b33843e2963871025c4c2bb7b8b8d8732a85" - ], 
- "markers": "python_version >= '3.8' and python_version < '4.0'", - "version": "==1.0.0" - }, - "flake8-bugbear": { - "hashes": [ - "sha256:32b2903e22331ae04885dae25756a32a8c666c85142e933f43512a70f342052a", - "sha256:83324bad4d90fee4bf64dd69c61aff94debf8073fbd807c8b6a36eec7a2f0719" - ], - "markers": "python_full_version >= '3.8.1'", - "version": "==23.12.2" - }, - "flake8-commas": { - "hashes": [ - "sha256:940441ab8ee544df564ae3b3f49f20462d75d5c7cac2463e0b27436e2050f263", - "sha256:ebb96c31e01d0ef1d0685a21f3f0e2f8153a0381430e748bf0bbbb5d5b453d54" - ], - "version": "==2.1.0" - }, - "flake8-comprehensions": { - "hashes": [ - "sha256:923c22603e0310376a6b55b03efebdc09753c69f2d977755cba8bb73458a5d4d", - "sha256:b7e027bbb52be2ceb779ee12484cdeef52b0ad3c1fcb8846292bdb86d3034681" - ], - "markers": "python_version >= '3.8'", - "version": "==3.15.0" - }, - "flake8-debugger": { - "hashes": [ - "sha256:0a5e55aeddcc81da631ad9c8c366e7318998f83ff00985a49e6b3ecf61e571bf", - "sha256:52b002560941e36d9bf806fca2523dc7fb8560a295d5f1a6e15ac2ded7a73840" - ], - "markers": "python_version >= '3.7'", - "version": "==4.1.2" - }, - "flake8-docstrings": { - "hashes": [ - "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af", - "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75" - ], - "markers": "python_version >= '3.7'", - "version": "==1.7.0" - }, - "flake8-eradicate": { - "hashes": [ - "sha256:18acc922ad7de623f5247c7d5595da068525ec5437dd53b22ec2259b96ce9d22", - "sha256:aee636cb9ecb5594a7cd92d67ad73eb69909e5cc7bd81710cf9d00970f3983a6" - ], - "markers": "python_version >= '3.8' and python_version < '4.0'", - "version": "==1.5.0" - }, - "flake8-isort": { - "hashes": [ - "sha256:0fec4dc3a15aefbdbe4012e51d5531a2eb5fa8b981cdfbc882296a59b54ede12", - "sha256:c1f82f3cf06a80c13e1d09bfae460e9666255d5c780b859f19f8318d420370b3" - ], - "markers": "python_version >= '3.8'", - "version": "==6.1.1" - }, - "flake8-quotes": { - "hashes": [ - 
"sha256:aad8492fb710a2d3eabe68c5f86a1428de650c8484127e14c43d0504ba30276c" - ], - "version": "==3.4.0" - }, - "flake8-rst-docstrings": { - "hashes": [ - "sha256:d1ce22b4bd37b73cd86b8d980e946ef198cfcc18ed82fedb674ceaa2f8d1afa4", - "sha256:f8c3c6892ff402292651c31983a38da082480ad3ba253743de52989bdc84ca1c" - ], - "markers": "python_version >= '3.7'", - "version": "==0.3.0" - }, - "flake8-string-format": { - "hashes": [ - "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2", - "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af" - ], - "version": "==0.3.0" - }, - "idna": { - "hashes": [ - "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac", - "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603" - ], - "markers": "python_version >= '3.6'", - "version": "==3.8" - }, - "iniconfig": { - "hashes": [ - "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", - "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" - ], - "markers": "python_version >= '3.7'", - "version": "==2.0.0" - }, - "isort": { - "hashes": [ - "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109", - "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6" - ], - "markers": "python_full_version >= '3.8.0'", - "version": "==5.13.2" - }, - "markdown-it-py": { - "hashes": [ - "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", - "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb" - ], - "markers": "python_version >= '3.8'", - "version": "==3.0.0" - }, - "mccabe": { - "hashes": [ - "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", - "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e" - ], - "markers": "python_version >= '3.6'", - "version": "==0.7.0" - }, - "mdurl": { - "hashes": [ - "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", - 
"sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba" - ], - "markers": "python_version >= '3.7'", - "version": "==0.1.2" - }, - "mypy": { - "hashes": [ - "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36", - "sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce", - "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6", - "sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b", - "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca", - "sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24", - "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383", - "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7", - "sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86", - "sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d", - "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4", - "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8", - "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987", - "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385", - "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79", - "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef", - "sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6", - "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70", - "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca", - "sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70", - "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12", - "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104", - "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a", - 
"sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318", - "sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1", - "sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b", - "sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d" - ], - "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==1.11.2" - }, - "mypy-extensions": { - "hashes": [ - "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", - "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782" - ], - "markers": "python_version >= '3.5'", - "version": "==1.0.0" - }, - "packaging": { - "hashes": [ - "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002", - "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124" - ], - "markers": "python_version >= '3.8'", - "version": "==24.1" - }, - "pathspec": { - "hashes": [ - "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", - "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712" - ], - "markers": "python_version >= '3.8'", - "version": "==0.12.1" - }, - "pbr": { - "hashes": [ - "sha256:788183e382e3d1d7707db08978239965e8b9e4e5ed42669bf4758186734d5f24", - "sha256:a776ae228892d8013649c0aeccbb3d5f99ee15e005a4cbb7e61d55a067b28a2a" - ], - "markers": "python_version >= '2.6'", - "version": "==6.1.0" - }, - "pep8-naming": { - "hashes": [ - "sha256:1705f046dfcd851378aac3be1cd1551c7c1e5ff363bacad707d43007877fa971", - "sha256:1a86b8c71a03337c97181917e2b472f0f5e4ccb06844a0d6f0a33522549e7a80" - ], - "markers": "python_version >= '3.7'", - "version": "==0.13.3" - }, - "platformdirs": { - "hashes": [ - "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee", - "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3" - ], - "markers": "python_version >= '3.8'", - "version": "==4.2.2" - }, - "pluggy": { - "hashes": [ - 
"sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", - "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669" - ], - "markers": "python_version >= '3.8'", - "version": "==1.5.0" - }, - "pycodestyle": { - "hashes": [ - "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3", - "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521" - ], - "markers": "python_version >= '3.8'", - "version": "==2.12.1" - }, - "pydocstyle": { - "hashes": [ - "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019", - "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1" - ], - "markers": "python_version >= '3.6'", - "version": "==6.3.0" - }, - "pyflakes": { - "hashes": [ - "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f", - "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a" - ], - "markers": "python_version >= '3.8'", - "version": "==3.2.0" - }, - "pygments": { - "hashes": [ - "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199", - "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a" - ], - "markers": "python_version >= '3.8'", - "version": "==2.18.0" - }, - "pytest": { - "hashes": [ - "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5", - "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce" - ], - "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==8.3.2" - }, - "pytest-mock": { - "hashes": [ - "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f", - "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0" - ], - "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==3.14.0" - }, - "python-dateutil": { - "hashes": [ - "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", - "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427" - 
], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.9.0.post0" - }, - "python-dotenv": { - "hashes": [ - "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", - "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a" - ], - "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==1.0.1" - }, - "pyyaml": { - "hashes": [ - "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff", - "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", - "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", - "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e", - "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", - "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", - "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", - "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", - "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", - "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", - "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a", - "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", - "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", - "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8", - "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", - "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19", - "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", - "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a", - "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", - "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", - 
"sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", - "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631", - "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d", - "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", - "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", - "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", - "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", - "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", - "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", - "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706", - "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", - "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", - "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", - "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083", - "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", - "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", - "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", - "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f", - "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725", - "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", - "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", - "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", - "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", - "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", - "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5", - "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d", 
- "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290", - "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", - "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", - "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", - "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", - "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12", - "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4" - ], - "markers": "python_version >= '3.8'", - "version": "==6.0.2" - }, - "requests": { - "hashes": [ - "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", - "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6" - ], - "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==2.32.3" - }, - "requests-mock": { - "hashes": [ - "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563", - "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401" - ], - "index": "pypi", - "markers": "python_version >= '3.5'", - "version": "==1.12.1" - }, - "restructuredtext-lint": { - "hashes": [ - "sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45" - ], - "version": "==1.4.0" - }, - "rich": { - "hashes": [ - "sha256:2e85306a063b9492dffc86278197a60cbece75bcb766022f3436f567cae11bdc", - "sha256:a5ac1f1cd448ade0d59cc3356f7db7a7ccda2c8cbae9c7a90c28ff463d3e91f4" - ], - "markers": "python_full_version >= '3.7.0'", - "version": "==13.8.0" - }, - "setuptools": { - "hashes": [ - "sha256:0274581a0037b638b9fc1c6883cc71c0210865aaa76073f7882376b641b84e8f", - "sha256:a85e96b8be2b906f3e3e789adec6a9323abf79758ecfa3065bd740d81158b11e" - ], - "markers": "python_version >= '3.8'", - "version": "==74.0.0" - }, - "six": { - "hashes": [ - "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", - 
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.16.0" - }, - "snowballstemmer": { - "hashes": [ - "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1", - "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a" - ], - "version": "==2.2.0" - }, - "stevedore": { - "hashes": [ - "sha256:1efd34ca08f474dad08d9b19e934a22c68bb6fe416926479ba29e5013bcc8f78", - "sha256:9a64265f4060312828151c204efbe9b7a9852a0d9228756344dbc7e4023e375a" - ], - "markers": "python_version >= '3.8'", - "version": "==5.3.0" - }, - "tomli": { - "hashes": [ - "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", - "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" - ], - "markers": "python_version < '3.11'", - "version": "==2.0.1" - }, - "types-requests": { - "hashes": [ - "sha256:90c079ff05e549f6bf50e02e910210b98b8ff1ebdd18e19c873cd237737c1358", - "sha256:f754283e152c752e46e70942fa2a146b5bc70393522257bb85bd1ef7e019dcc3" - ], - "index": "pypi", - "markers": "python_version >= '3.8'", - "version": "==2.32.0.20240712" - }, - "typing-extensions": { - "hashes": [ - "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", - "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8" - ], - "markers": "python_version < '3.11'", - "version": "==4.12.2" - }, - "urllib3": { - "hashes": [ - "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472", - "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168" - ], - "markers": "python_version >= '3.8'", - "version": "==2.2.2" - }, - "wemake-python-styleguide": { - "hashes": [ - "sha256:2219be145185edcd5e01f4ce49e3dea11acc34f2c377face0c175bb6ea6ac988", - "sha256:69139858cf5b2a9ba09dac136e2873a4685515768f68fdef2684ebefd7b1dafd" - ], - "index": "pypi", - "markers": "python_version < '4.0' 
and python_full_version >= '3.8.1'", - "version": "==0.18.0" - } - } -} diff --git a/pyproject.toml b/pyproject.toml index 2c58e45..9115e67 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,17 +1,14 @@ -[build-system] -requires = ["setuptools"] -build-backend = "setuptools.build_meta" - [project] name = "typesense" description = "Python client for Typesense, an open source and typo tolerant search engine." -authors = [ - {"name" = "Typesense", "email" = "contact@typesense.org"}, -] +authors = [{ name = "Typesense", email = "contact@typesense.org" }] +requires-python = ">=3.9" readme = "README.md" -requires-python = ">=3" -keywords = ["search", "typesense"] -license = {"text" = "Apache 2.0"} +license = { text = "Apache 2.0" } +keywords = [ + "search", + "typesense", +] classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", @@ -27,6 +24,32 @@ Documentation = "https://typesense.org/" Source = "https://github.com/typesense/typesense-python" Tracker = "https://github.com/typesense/typesense-python/issues" +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +[dependency-groups] +dev = [ + "mypy", + "wemake-python-styleguide", + "flake8", + "black", + "pytest", + "coverage", + "pytest-mock", + "requests-mock", + "python-dotenv", + "types-requests", + "faker", +] + +[tool.uv] +package = false + +[[tool.uv.index]] +name = "pypi" +url = "https://pypi.org/simple" + [tool.setuptools.dynamic] version = {attr = "typesense.__version__"} diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..fb6c8cf --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,74 @@ +# This file was autogenerated by uv via the following command: +# uv pip compile pyproject.toml --group dev -o requirements-dev.txt 
+attrs==25.3.0 + # via wemake-python-styleguide +black==25.1.0 + # via typesense (pyproject.toml:dev) +certifi==2025.4.26 + # via requests +charset-normalizer==3.4.2 + # via requests +click==8.2.1 + # via black +coverage==7.8.2 + # via typesense (pyproject.toml:dev) +faker==37.3.0 + # via typesense (pyproject.toml:dev) +flake8==7.2.0 + # via + # typesense (pyproject.toml:dev) + # wemake-python-styleguide +idna==3.10 + # via requests +iniconfig==2.1.0 + # via pytest +mccabe==0.7.0 + # via flake8 +mypy==1.15.0 + # via typesense (pyproject.toml:dev) +mypy-extensions==1.1.0 + # via + # black + # mypy +packaging==25.0 + # via + # black + # pytest +pathspec==0.12.1 + # via black +platformdirs==4.3.8 + # via black +pluggy==1.6.0 + # via pytest +pycodestyle==2.13.0 + # via flake8 +pyflakes==3.3.2 + # via flake8 +pygments==2.19.1 + # via wemake-python-styleguide +pytest==8.3.5 + # via + # typesense (pyproject.toml:dev) + # pytest-mock +pytest-mock==3.14.1 + # via typesense (pyproject.toml:dev) +python-dotenv==1.1.0 + # via typesense (pyproject.toml:dev) +requests==2.32.3 + # via + # typesense (pyproject.toml) + # requests-mock +requests-mock==1.12.1 + # via typesense (pyproject.toml:dev) +types-requests==2.32.0.20250515 + # via typesense (pyproject.toml:dev) +typing-extensions==4.13.2 + # via mypy +tzdata==2025.2 + # via faker +urllib3==2.4.0 + # via + # requests + # types-requests +wemake-python-styleguide==1.1.0 + # via typesense (pyproject.toml:dev) diff --git a/requirements.txt b/requirements.txt index 9bdf039..d18b8a6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,12 @@ --i https://pypi.org/simple --r requirements/common.txt +# This file was autogenerated by uv via the following command: +# uv pip compile pyproject.toml -o requirements.txt +certifi==2024.8.30 + # via requests +charset-normalizer==3.3.2 + # via requests +idna==3.8 + # via requests +requests==2.32.3 + # via typesense (pyproject.toml) 
+urllib3==2.2.2 + # via requests diff --git a/requirements/common.txt b/requirements/common.txt deleted file mode 100644 index 7b89932..0000000 --- a/requirements/common.txt +++ /dev/null @@ -1,6 +0,0 @@ --i https://pypi.org/simple -certifi==2024.8.30; python_version >= '3.6' -charset-normalizer==3.3.2; python_full_version >= '3.7.0' -idna==3.8; python_version >= '3.6' -requests==2.32.3; python_version >= '3.8' -urllib3==2.2.2; python_version >= '3.8' diff --git a/requirements/dev.txt b/requirements/dev.txt deleted file mode 100644 index 1b4f41e..0000000 --- a/requirements/dev.txt +++ /dev/null @@ -1,64 +0,0 @@ --i https://pypi.org/simple --r common.txt -astor==0.8.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' -attrs==24.2.0; python_version >= '3.7' -bandit==1.7.9; python_version >= '3.8' -black==24.8.0; python_version >= '3.8' -certifi==2024.8.30; python_version >= '3.6' -charset-normalizer==3.3.2; python_full_version >= '3.7.0' -click==8.1.7; python_version >= '3.7' -coverage==7.6.1; python_version >= '3.8' -darglint==1.8.1; python_version >= '3.6' and python_version < '4.0' -docutils==0.20.1; python_version >= '3.7' -eradicate==2.3.0 -exceptiongroup==1.2.2; python_version < '3.11' -faker==28.0.0; python_version >= '3.8' -flake8==7.1.1; python_full_version >= '3.8.1' -flake8-bandit==4.1.1; python_version >= '3.6' -flake8-broken-line==1.0.0; python_version >= '3.8' and python_version < '4.0' -flake8-bugbear==23.12.2; python_full_version >= '3.8.1' -flake8-commas==2.1.0 -flake8-comprehensions==3.15.0; python_version >= '3.8' -flake8-debugger==4.1.2; python_version >= '3.7' -flake8-docstrings==1.7.0; python_version >= '3.7' -flake8-eradicate==1.5.0; python_version >= '3.8' and python_version < '4.0' -flake8-isort==6.1.1; python_version >= '3.8' -flake8-quotes==3.4.0 -flake8-rst-docstrings==0.3.0; python_version >= '3.7' -flake8-string-format==0.3.0 
-idna==3.8; python_version >= '3.6' -iniconfig==2.0.0; python_version >= '3.7' -isort==5.13.2; python_full_version >= '3.8.0' -markdown-it-py==3.0.0; python_version >= '3.8' -mccabe==0.7.0; python_version >= '3.6' -mdurl==0.1.2; python_version >= '3.7' -mypy==1.11.2; python_version >= '3.8' -mypy-extensions==1.0.0; python_version >= '3.5' -packaging==24.1; python_version >= '3.8' -pathspec==0.12.1; python_version >= '3.8' -pbr==6.1.0; python_version >= '2.6' -pep8-naming==0.13.3; python_version >= '3.7' -platformdirs==4.2.2; python_version >= '3.8' -pluggy==1.5.0; python_version >= '3.8' -pycodestyle==2.12.1; python_version >= '3.8' -pydocstyle==6.3.0; python_version >= '3.6' -pyflakes==3.2.0; python_version >= '3.8' -pygments==2.18.0; python_version >= '3.8' -pytest==8.3.2; python_version >= '3.8' -pytest-mock==3.14.0; python_version >= '3.8' -python-dateutil==2.9.0.post0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' -python-dotenv==1.0.1; python_version >= '3.8' -pyyaml==6.0.2; python_version >= '3.8' -requests==2.32.3; python_version >= '3.8' -requests-mock==1.12.1; python_version >= '3.5' -restructuredtext-lint==1.4.0 -rich==13.8.0; python_full_version >= '3.7.0' -setuptools==74.0.0; python_version >= '3.8' -six==1.16.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' -snowballstemmer==2.2.0 -stevedore==5.3.0; python_version >= '3.8' -tomli==2.0.1; python_version < '3.11' -types-requests==2.32.0.20240712; python_version >= '3.8' -typing-extensions==4.12.2; python_version < '3.11' -urllib3==2.2.2; python_version >= '3.8' -wemake-python-styleguide==0.18.0; python_version < '4.0' and python_full_version >= '3.8.1' diff --git a/src/typesense/__init__.py b/src/typesense/__init__.py index c83493e..5f7f548 100644 --- a/src/typesense/__init__.py +++ b/src/typesense/__init__.py @@ -1,5 +1,4 @@ from .client import Client # NOQA -__version__ = '1.1.1' - +__version__ = "1.2.0" diff --git a/uv.lock b/uv.lock new file mode 
100644 index 0000000..992dbf4 --- /dev/null +++ b/uv.lock @@ -0,0 +1,1041 @@ +version = 1 +revision = 2 +requires-python = ">=3.9" +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version < '3.10'", +] + +[[package]] +name = "astor" +version = "0.8.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/21/75b771132fee241dfe601d39ade629548a9626d1d39f333fde31bc46febe/astor-0.8.1.tar.gz", hash = "sha256:6a6effda93f4e1ce9f618779b2dd1d9d84f1e32812c23a29b3fff6fd7f63fa5e", size = 35090, upload-time = "2019-12-10T01:50:35.51Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/88/97eef84f48fa04fbd6750e62dcceafba6c63c81b7ac1420856c8dcc0a3f9/astor-0.8.1-py2.py3-none-any.whl", hash = "sha256:070a54e890cefb5b3739d19f30f5a5ec840ffc9c50ffa7d23cc9fc1a38ebbfc5", size = 27488, upload-time = "2019-12-10T01:50:33.628Z" }, +] + +[[package]] +name = "attrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, +] + +[[package]] +name = "bandit" +version = "1.8.3" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, + { name = "pyyaml", marker = "python_full_version < '3.10'" }, + { name = "rich", marker = "python_full_version < '3.10'" }, + { name = "stevedore", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1a/a5/144a45f8e67df9d66c3bc3f7e69a39537db8bff1189ab7cff4e9459215da/bandit-1.8.3.tar.gz", hash = "sha256:f5847beb654d309422985c36644649924e0ea4425c76dec2e89110b87506193a", size = 4232005, upload-time = "2025-02-17T05:24:57.031Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/85/db74b9233e0aa27ec96891045c5e920a64dd5cbccd50f8e64e9460f48d35/bandit-1.8.3-py3-none-any.whl", hash = "sha256:28f04dc0d258e1dd0f99dee8eefa13d1cb5e3fde1a5ab0c523971f97b289bcd8", size = 129078, upload-time = "2025-02-17T05:24:54.068Z" }, +] + +[[package]] +name = "black" +version = "25.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "mypy-extensions" }, + { name = "packaging" }, + { name = "pathspec" }, + { name = "platformdirs" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/94/49/26a7b0f3f35da4b5a65f081943b7bcd22d7002f5f0fb8098ec1ff21cb6ef/black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666", size = 649449, upload-time = "2025-01-29T04:15:40.373Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/3b/4ba3f93ac8d90410423fdd31d7541ada9bcee1df32fb90d26de41ed40e1d/black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32", size = 1629419, upload-time = "2025-01-29T05:37:06.642Z" }, + { url = "https://files.pythonhosted.org/packages/b4/02/0bde0485146a8a5e694daed47561785e8b77a0466ccc1f3e485d5ef2925e/black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da", size = 1461080, upload-time = "2025-01-29T05:37:09.321Z" }, + { url = "https://files.pythonhosted.org/packages/52/0e/abdf75183c830eaca7589144ff96d49bce73d7ec6ad12ef62185cc0f79a2/black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7", size = 1766886, upload-time = "2025-01-29T04:18:24.432Z" }, + { url = "https://files.pythonhosted.org/packages/dc/a6/97d8bb65b1d8a41f8a6736222ba0a334db7b7b77b8023ab4568288f23973/black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9", size = 1419404, upload-time = "2025-01-29T04:19:04.296Z" }, + { url = "https://files.pythonhosted.org/packages/7e/4f/87f596aca05c3ce5b94b8663dbfe242a12843caaa82dd3f85f1ffdc3f177/black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0", size = 1614372, upload-time = "2025-01-29T05:37:11.71Z" }, + { url = "https://files.pythonhosted.org/packages/e7/d0/2c34c36190b741c59c901e56ab7f6e54dad8df05a6272a9747ecef7c6036/black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299", size = 1442865, upload-time = "2025-01-29T05:37:14.309Z" }, + { url = "https://files.pythonhosted.org/packages/21/d4/7518c72262468430ead45cf22bd86c883a6448b9eb43672765d69a8f1248/black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096", size = 1749699, upload-time = "2025-01-29T04:18:17.688Z" }, + { url = "https://files.pythonhosted.org/packages/58/db/4f5beb989b547f79096e035c4981ceb36ac2b552d0ac5f2620e941501c99/black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2", size = 1428028, upload-time = "2025-01-29T04:18:51.711Z" }, + { url = "https://files.pythonhosted.org/packages/83/71/3fe4741df7adf015ad8dfa082dd36c94ca86bb21f25608eb247b4afb15b2/black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b", size = 1650988, upload-time = "2025-01-29T05:37:16.707Z" }, + { url = "https://files.pythonhosted.org/packages/13/f3/89aac8a83d73937ccd39bbe8fc6ac8860c11cfa0af5b1c96d081facac844/black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc", size = 1453985, upload-time = "2025-01-29T05:37:18.273Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/22/b99efca33f1f3a1d2552c714b1e1b5ae92efac6c43e790ad539a163d1754/black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f", size = 1783816, upload-time = "2025-01-29T04:18:33.823Z" }, + { url = "https://files.pythonhosted.org/packages/18/7e/a27c3ad3822b6f2e0e00d63d58ff6299a99a5b3aee69fa77cd4b0076b261/black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba", size = 1440860, upload-time = "2025-01-29T04:19:12.944Z" }, + { url = "https://files.pythonhosted.org/packages/98/87/0edf98916640efa5d0696e1abb0a8357b52e69e82322628f25bf14d263d1/black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f", size = 1650673, upload-time = "2025-01-29T05:37:20.574Z" }, + { url = "https://files.pythonhosted.org/packages/52/e5/f7bf17207cf87fa6e9b676576749c6b6ed0d70f179a3d812c997870291c3/black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3", size = 1453190, upload-time = "2025-01-29T05:37:22.106Z" }, + { url = "https://files.pythonhosted.org/packages/e3/ee/adda3d46d4a9120772fae6de454c8495603c37c4c3b9c60f25b1ab6401fe/black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171", size = 1782926, upload-time = "2025-01-29T04:18:58.564Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/64/94eb5f45dcb997d2082f097a3944cfc7fe87e071907f677e80788a2d7b7a/black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18", size = 1442613, upload-time = "2025-01-29T04:19:27.63Z" }, + { url = "https://files.pythonhosted.org/packages/d3/b6/ae7507470a4830dbbfe875c701e84a4a5fb9183d1497834871a715716a92/black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0", size = 1628593, upload-time = "2025-01-29T05:37:23.672Z" }, + { url = "https://files.pythonhosted.org/packages/24/c1/ae36fa59a59f9363017ed397750a0cd79a470490860bc7713967d89cdd31/black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f", size = 1460000, upload-time = "2025-01-29T05:37:25.829Z" }, + { url = "https://files.pythonhosted.org/packages/ac/b6/98f832e7a6c49aa3a464760c67c7856363aa644f2f3c74cf7d624168607e/black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e", size = 1765963, upload-time = "2025-01-29T04:18:38.116Z" }, + { url = "https://files.pythonhosted.org/packages/ce/e9/2cb0a017eb7024f70e0d2e9bdb8c5a5b078c5740c7f8816065d06f04c557/black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355", size = 1419419, upload-time = "2025-01-29T04:18:30.191Z" }, + { url = "https://files.pythonhosted.org/packages/09/71/54e999902aed72baf26bca0d50781b01838251a462612966e9fc4891eadd/black-25.1.0-py3-none-any.whl", hash = 
"sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717", size = 207646, upload-time = "2025-01-29T04:15:38.082Z" }, +] + +[[package]] +name = "certifi" +version = "2025.4.26" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", size = 160705, upload-time = "2025-04-26T02:12:29.51Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3", size = 159618, upload-time = "2025-04-26T02:12:27.662Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/28/9901804da60055b406e1a1c5ba7aac1276fb77f1dde635aabfc7fd84b8ab/charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941", size = 201818, upload-time = "2025-05-02T08:31:46.725Z" }, + { url = 
"https://files.pythonhosted.org/packages/d9/9b/892a8c8af9110935e5adcbb06d9c6fe741b6bb02608c6513983048ba1a18/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd", size = 144649, upload-time = "2025-05-02T08:31:48.889Z" }, + { url = "https://files.pythonhosted.org/packages/7b/a5/4179abd063ff6414223575e008593861d62abfc22455b5d1a44995b7c101/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6", size = 155045, upload-time = "2025-05-02T08:31:50.757Z" }, + { url = "https://files.pythonhosted.org/packages/3b/95/bc08c7dfeddd26b4be8c8287b9bb055716f31077c8b0ea1cd09553794665/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d", size = 147356, upload-time = "2025-05-02T08:31:52.634Z" }, + { url = "https://files.pythonhosted.org/packages/a8/2d/7a5b635aa65284bf3eab7653e8b4151ab420ecbae918d3e359d1947b4d61/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86", size = 149471, upload-time = "2025-05-02T08:31:56.207Z" }, + { url = "https://files.pythonhosted.org/packages/ae/38/51fc6ac74251fd331a8cfdb7ec57beba8c23fd5493f1050f71c87ef77ed0/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c", size = 151317, upload-time = "2025-05-02T08:31:57.613Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/17/edee1e32215ee6e9e46c3e482645b46575a44a2d72c7dfd49e49f60ce6bf/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0", size = 146368, upload-time = "2025-05-02T08:31:59.468Z" }, + { url = "https://files.pythonhosted.org/packages/26/2c/ea3e66f2b5f21fd00b2825c94cafb8c326ea6240cd80a91eb09e4a285830/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef", size = 154491, upload-time = "2025-05-02T08:32:01.219Z" }, + { url = "https://files.pythonhosted.org/packages/52/47/7be7fa972422ad062e909fd62460d45c3ef4c141805b7078dbab15904ff7/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6", size = 157695, upload-time = "2025-05-02T08:32:03.045Z" }, + { url = "https://files.pythonhosted.org/packages/2f/42/9f02c194da282b2b340f28e5fb60762de1151387a36842a92b533685c61e/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366", size = 154849, upload-time = "2025-05-02T08:32:04.651Z" }, + { url = "https://files.pythonhosted.org/packages/67/44/89cacd6628f31fb0b63201a618049be4be2a7435a31b55b5eb1c3674547a/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db", size = 150091, upload-time = "2025-05-02T08:32:06.719Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/79/4b8da9f712bc079c0f16b6d67b099b0b8d808c2292c937f267d816ec5ecc/charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a", size = 98445, upload-time = "2025-05-02T08:32:08.66Z" }, + { url = "https://files.pythonhosted.org/packages/7d/d7/96970afb4fb66497a40761cdf7bd4f6fca0fc7bafde3a84f836c1f57a926/charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509", size = 105782, upload-time = "2025-05-02T08:32:10.46Z" }, + { url = "https://files.pythonhosted.org/packages/05/85/4c40d00dcc6284a1c1ad5de5e0996b06f39d8232f1031cd23c2f5c07ee86/charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", size = 198794, upload-time = "2025-05-02T08:32:11.945Z" }, + { url = "https://files.pythonhosted.org/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", size = 142846, upload-time = "2025-05-02T08:32:13.946Z" }, + { url = "https://files.pythonhosted.org/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", size = 153350, upload-time = "2025-05-02T08:32:15.873Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", size = 145657, upload-time = "2025-05-02T08:32:17.283Z" }, + { url = "https://files.pythonhosted.org/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f", size = 147260, upload-time = "2025-05-02T08:32:18.807Z" }, + { url = "https://files.pythonhosted.org/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", size = 149164, upload-time = "2025-05-02T08:32:20.333Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", size = 144571, upload-time = "2025-05-02T08:32:21.86Z" }, + { url = "https://files.pythonhosted.org/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", size = 151952, upload-time = "2025-05-02T08:32:23.434Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", size = 155959, upload-time = "2025-05-02T08:32:24.993Z" }, + { url = "https://files.pythonhosted.org/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", size = 153030, upload-time = "2025-05-02T08:32:26.435Z" }, + { url = "https://files.pythonhosted.org/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", size = 148015, upload-time = "2025-05-02T08:32:28.376Z" }, + { url = "https://files.pythonhosted.org/packages/90/07/c5fd7c11eafd561bb51220d600a788f1c8d77c5eef37ee49454cc5c35575/charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", size = 98106, upload-time = "2025-05-02T08:32:30.281Z" }, + { url = "https://files.pythonhosted.org/packages/a8/05/5e33dbef7e2f773d672b6d79f10ec633d4a71cd96db6673625838a4fd532/charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", size = 105402, upload-time = "2025-05-02T08:32:32.191Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" }, + { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" }, + { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" }, + { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" }, + { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" }, + { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" }, + { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" }, + { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" }, + { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357, upload-time = "2025-05-02T08:32:53.079Z" }, + { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, upload-time = "2025-05-02T08:32:54.573Z" }, + { url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622, upload-time = "2025-05-02T08:32:56.363Z" }, + { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" }, + { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231, upload-time = "2025-05-02T08:33:02.081Z" }, + { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243, upload-time = "2025-05-02T08:33:04.063Z" }, + { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442, upload-time = "2025-05-02T08:33:06.418Z" }, + { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147, upload-time = "2025-05-02T08:33:08.183Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057, upload-time = "2025-05-02T08:33:09.986Z" }, + { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454, upload-time = "2025-05-02T08:33:11.814Z" }, + { url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174, upload-time = "2025-05-02T08:33:13.707Z" }, + { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166, upload-time = "2025-05-02T08:33:15.458Z" }, + { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064, upload-time = "2025-05-02T08:33:17.06Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641, upload-time = "2025-05-02T08:33:18.753Z" }, + { url = "https://files.pythonhosted.org/packages/28/f8/dfb01ff6cc9af38552c69c9027501ff5a5117c4cc18dcd27cb5259fa1888/charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4", size = 201671, upload-time = "2025-05-02T08:34:12.696Z" }, + { url = "https://files.pythonhosted.org/packages/32/fb/74e26ee556a9dbfe3bd264289b67be1e6d616329403036f6507bb9f3f29c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7", size = 144744, upload-time = "2025-05-02T08:34:14.665Z" }, + { url = "https://files.pythonhosted.org/packages/ad/06/8499ee5aa7addc6f6d72e068691826ff093329fe59891e83b092ae4c851c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836", size = 154993, upload-time = "2025-05-02T08:34:17.134Z" }, + { url = "https://files.pythonhosted.org/packages/f1/a2/5e4c187680728219254ef107a6949c60ee0e9a916a5dadb148c7ae82459c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597", size = 147382, upload-time = "2025-05-02T08:34:19.081Z" }, + { url = 
"https://files.pythonhosted.org/packages/4c/fe/56aca740dda674f0cc1ba1418c4d84534be51f639b5f98f538b332dc9a95/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7", size = 149536, upload-time = "2025-05-02T08:34:21.073Z" }, + { url = "https://files.pythonhosted.org/packages/53/13/db2e7779f892386b589173dd689c1b1e304621c5792046edd8a978cbf9e0/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f", size = 151349, upload-time = "2025-05-02T08:34:23.193Z" }, + { url = "https://files.pythonhosted.org/packages/69/35/e52ab9a276186f729bce7a0638585d2982f50402046e4b0faa5d2c3ef2da/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba", size = 146365, upload-time = "2025-05-02T08:34:25.187Z" }, + { url = "https://files.pythonhosted.org/packages/a6/d8/af7333f732fc2e7635867d56cb7c349c28c7094910c72267586947561b4b/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12", size = 154499, upload-time = "2025-05-02T08:34:27.359Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3d/a5b2e48acef264d71e036ff30bcc49e51bde80219bb628ba3e00cf59baac/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518", size = 157735, upload-time = "2025-05-02T08:34:29.798Z" }, + { url = 
"https://files.pythonhosted.org/packages/85/d8/23e2c112532a29f3eef374375a8684a4f3b8e784f62b01da931186f43494/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5", size = 154786, upload-time = "2025-05-02T08:34:31.858Z" }, + { url = "https://files.pythonhosted.org/packages/c7/57/93e0169f08ecc20fe82d12254a200dfaceddc1c12a4077bf454ecc597e33/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3", size = 150203, upload-time = "2025-05-02T08:34:33.88Z" }, + { url = "https://files.pythonhosted.org/packages/2c/9d/9bf2b005138e7e060d7ebdec7503d0ef3240141587651f4b445bdf7286c2/charset_normalizer-3.4.2-cp39-cp39-win32.whl", hash = "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471", size = 98436, upload-time = "2025-05-02T08:34:35.907Z" }, + { url = "https://files.pythonhosted.org/packages/6d/24/5849d46cf4311bbf21b424c443b09b459f5b436b1558c04e45dbb7cc478b/charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e", size = 105772, upload-time = "2025-05-02T08:34:37.935Z" }, + { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" }, +] + +[[package]] +name = "click" +version = "8.1.8" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +dependencies = [ + { name = 
"colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload-time = "2024-12-21T18:38:44.339Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload-time = "2024-12-21T18:38:41.666Z" }, +] + +[[package]] +name = "click" +version = "8.2.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +dependencies = [ + { name = "colorama", marker = "python_full_version >= '3.10' and sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coverage" +version = "7.8.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/07/998afa4a0ecdf9b1981ae05415dad2d4e7716e1b1f00abbd91691ac09ac9/coverage-7.8.2.tar.gz", hash = "sha256:a886d531373a1f6ff9fad2a2ba4a045b68467b779ae729ee0b3b10ac20033b27", size = 812759, upload-time = "2025-05-23T11:39:57.856Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/6b/7dd06399a5c0b81007e3a6af0395cd60e6a30f959f8d407d3ee04642e896/coverage-7.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bd8ec21e1443fd7a447881332f7ce9d35b8fbd2849e761bb290b584535636b0a", size = 211573, upload-time = "2025-05-23T11:37:47.207Z" }, + { url = "https://files.pythonhosted.org/packages/f0/df/2b24090820a0bac1412955fb1a4dade6bc3b8dcef7b899c277ffaf16916d/coverage-7.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c26c2396674816deaeae7ded0e2b42c26537280f8fe313335858ffff35019be", size = 212006, upload-time = "2025-05-23T11:37:50.289Z" }, + { url = 
"https://files.pythonhosted.org/packages/c5/c4/e4e3b998e116625562a872a342419652fa6ca73f464d9faf9f52f1aff427/coverage-7.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1aec326ed237e5880bfe69ad41616d333712c7937bcefc1343145e972938f9b3", size = 241128, upload-time = "2025-05-23T11:37:52.229Z" }, + { url = "https://files.pythonhosted.org/packages/b1/67/b28904afea3e87a895da850ba587439a61699bf4b73d04d0dfd99bbd33b4/coverage-7.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e818796f71702d7a13e50c70de2a1924f729228580bcba1607cccf32eea46e6", size = 239026, upload-time = "2025-05-23T11:37:53.846Z" }, + { url = "https://files.pythonhosted.org/packages/8c/0f/47bf7c5630d81bc2cd52b9e13043685dbb7c79372a7f5857279cc442b37c/coverage-7.8.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:546e537d9e24efc765c9c891328f30f826e3e4808e31f5d0f87c4ba12bbd1622", size = 240172, upload-time = "2025-05-23T11:37:55.711Z" }, + { url = "https://files.pythonhosted.org/packages/ba/38/af3eb9d36d85abc881f5aaecf8209383dbe0fa4cac2d804c55d05c51cb04/coverage-7.8.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ab9b09a2349f58e73f8ebc06fac546dd623e23b063e5398343c5270072e3201c", size = 240086, upload-time = "2025-05-23T11:37:57.724Z" }, + { url = "https://files.pythonhosted.org/packages/9e/64/c40c27c2573adeba0fe16faf39a8aa57368a1f2148865d6bb24c67eadb41/coverage-7.8.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fd51355ab8a372d89fb0e6a31719e825cf8df8b6724bee942fb5b92c3f016ba3", size = 238792, upload-time = "2025-05-23T11:37:59.737Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/ab/b7c85146f15457671c1412afca7c25a5696d7625e7158002aa017e2d7e3c/coverage-7.8.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0774df1e093acb6c9e4d58bce7f86656aeed6c132a16e2337692c12786b32404", size = 239096, upload-time = "2025-05-23T11:38:01.693Z" }, + { url = "https://files.pythonhosted.org/packages/d3/50/9446dad1310905fb1dc284d60d4320a5b25d4e3e33f9ea08b8d36e244e23/coverage-7.8.2-cp310-cp310-win32.whl", hash = "sha256:00f2e2f2e37f47e5f54423aeefd6c32a7dbcedc033fcd3928a4f4948e8b96af7", size = 214144, upload-time = "2025-05-23T11:38:03.68Z" }, + { url = "https://files.pythonhosted.org/packages/23/ed/792e66ad7b8b0df757db8d47af0c23659cdb5a65ef7ace8b111cacdbee89/coverage-7.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:145b07bea229821d51811bf15eeab346c236d523838eda395ea969d120d13347", size = 215043, upload-time = "2025-05-23T11:38:05.217Z" }, + { url = "https://files.pythonhosted.org/packages/6a/4d/1ff618ee9f134d0de5cc1661582c21a65e06823f41caf801aadf18811a8e/coverage-7.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b99058eef42e6a8dcd135afb068b3d53aff3921ce699e127602efff9956457a9", size = 211692, upload-time = "2025-05-23T11:38:08.485Z" }, + { url = "https://files.pythonhosted.org/packages/96/fa/c3c1b476de96f2bc7a8ca01a9f1fcb51c01c6b60a9d2c3e66194b2bdb4af/coverage-7.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5feb7f2c3e6ea94d3b877def0270dff0947b8d8c04cfa34a17be0a4dc1836879", size = 212115, upload-time = "2025-05-23T11:38:09.989Z" }, + { url = "https://files.pythonhosted.org/packages/f7/c2/5414c5a1b286c0f3881ae5adb49be1854ac5b7e99011501f81c8c1453065/coverage-7.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:670a13249b957bb9050fab12d86acef7bf8f6a879b9d1a883799276e0d4c674a", size = 244740, upload-time = "2025-05-23T11:38:11.947Z" }, + { url = "https://files.pythonhosted.org/packages/cd/46/1ae01912dfb06a642ef3dd9cf38ed4996fda8fe884dab8952da616f81a2b/coverage-7.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bdc8bf760459a4a4187b452213e04d039990211f98644c7292adf1e471162b5", size = 242429, upload-time = "2025-05-23T11:38:13.955Z" }, + { url = "https://files.pythonhosted.org/packages/06/58/38c676aec594bfe2a87c7683942e5a30224791d8df99bcc8439fde140377/coverage-7.8.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07a989c867986c2a75f158f03fdb413128aad29aca9d4dbce5fc755672d96f11", size = 244218, upload-time = "2025-05-23T11:38:15.631Z" }, + { url = "https://files.pythonhosted.org/packages/80/0c/95b1023e881ce45006d9abc250f76c6cdab7134a1c182d9713878dfefcb2/coverage-7.8.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2db10dedeb619a771ef0e2949ccba7b75e33905de959c2643a4607bef2f3fb3a", size = 243865, upload-time = "2025-05-23T11:38:17.622Z" }, + { url = "https://files.pythonhosted.org/packages/57/37/0ae95989285a39e0839c959fe854a3ae46c06610439350d1ab860bf020ac/coverage-7.8.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e6ea7dba4e92926b7b5f0990634b78ea02f208d04af520c73a7c876d5a8d36cb", size = 242038, upload-time = "2025-05-23T11:38:19.966Z" }, + { url = "https://files.pythonhosted.org/packages/4d/82/40e55f7c0eb5e97cc62cbd9d0746fd24e8caf57be5a408b87529416e0c70/coverage-7.8.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ef2f22795a7aca99fc3c84393a55a53dd18ab8c93fb431004e4d8f0774150f54", size = 242567, upload-time = 
"2025-05-23T11:38:21.912Z" }, + { url = "https://files.pythonhosted.org/packages/f9/35/66a51adc273433a253989f0d9cc7aa6bcdb4855382cf0858200afe578861/coverage-7.8.2-cp311-cp311-win32.whl", hash = "sha256:641988828bc18a6368fe72355df5f1703e44411adbe49bba5644b941ce6f2e3a", size = 214194, upload-time = "2025-05-23T11:38:23.571Z" }, + { url = "https://files.pythonhosted.org/packages/f6/8f/a543121f9f5f150eae092b08428cb4e6b6d2d134152c3357b77659d2a605/coverage-7.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:8ab4a51cb39dc1933ba627e0875046d150e88478dbe22ce145a68393e9652975", size = 215109, upload-time = "2025-05-23T11:38:25.137Z" }, + { url = "https://files.pythonhosted.org/packages/77/65/6cc84b68d4f35186463cd7ab1da1169e9abb59870c0f6a57ea6aba95f861/coverage-7.8.2-cp311-cp311-win_arm64.whl", hash = "sha256:8966a821e2083c74d88cca5b7dcccc0a3a888a596a04c0b9668a891de3a0cc53", size = 213521, upload-time = "2025-05-23T11:38:27.123Z" }, + { url = "https://files.pythonhosted.org/packages/8d/2a/1da1ada2e3044fcd4a3254fb3576e160b8fe5b36d705c8a31f793423f763/coverage-7.8.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e2f6fe3654468d061942591aef56686131335b7a8325684eda85dacdf311356c", size = 211876, upload-time = "2025-05-23T11:38:29.01Z" }, + { url = "https://files.pythonhosted.org/packages/70/e9/3d715ffd5b6b17a8be80cd14a8917a002530a99943cc1939ad5bb2aa74b9/coverage-7.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76090fab50610798cc05241bf83b603477c40ee87acd358b66196ab0ca44ffa1", size = 212130, upload-time = "2025-05-23T11:38:30.675Z" }, + { url = 
"https://files.pythonhosted.org/packages/a0/02/fdce62bb3c21649abfd91fbdcf041fb99be0d728ff00f3f9d54d97ed683e/coverage-7.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd0a0a5054be160777a7920b731a0570284db5142abaaf81bcbb282b8d99279", size = 246176, upload-time = "2025-05-23T11:38:32.395Z" }, + { url = "https://files.pythonhosted.org/packages/a7/52/decbbed61e03b6ffe85cd0fea360a5e04a5a98a7423f292aae62423b8557/coverage-7.8.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da23ce9a3d356d0affe9c7036030b5c8f14556bd970c9b224f9c8205505e3b99", size = 243068, upload-time = "2025-05-23T11:38:33.989Z" }, + { url = "https://files.pythonhosted.org/packages/38/6c/d0e9c0cce18faef79a52778219a3c6ee8e336437da8eddd4ab3dbd8fadff/coverage-7.8.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9392773cffeb8d7e042a7b15b82a414011e9d2b5fdbbd3f7e6a6b17d5e21b20", size = 245328, upload-time = "2025-05-23T11:38:35.568Z" }, + { url = "https://files.pythonhosted.org/packages/f0/70/f703b553a2f6b6c70568c7e398ed0789d47f953d67fbba36a327714a7bca/coverage-7.8.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:876cbfd0b09ce09d81585d266c07a32657beb3eaec896f39484b631555be0fe2", size = 245099, upload-time = "2025-05-23T11:38:37.627Z" }, + { url = "https://files.pythonhosted.org/packages/ec/fb/4cbb370dedae78460c3aacbdad9d249e853f3bc4ce5ff0e02b1983d03044/coverage-7.8.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3da9b771c98977a13fbc3830f6caa85cae6c9c83911d24cb2d218e9394259c57", size = 243314, upload-time = "2025-05-23T11:38:39.238Z" }, + { url = 
"https://files.pythonhosted.org/packages/39/9f/1afbb2cb9c8699b8bc38afdce00a3b4644904e6a38c7bf9005386c9305ec/coverage-7.8.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a990f6510b3292686713bfef26d0049cd63b9c7bb17e0864f133cbfd2e6167f", size = 244489, upload-time = "2025-05-23T11:38:40.845Z" }, + { url = "https://files.pythonhosted.org/packages/79/fa/f3e7ec7d220bff14aba7a4786ae47043770cbdceeea1803083059c878837/coverage-7.8.2-cp312-cp312-win32.whl", hash = "sha256:bf8111cddd0f2b54d34e96613e7fbdd59a673f0cf5574b61134ae75b6f5a33b8", size = 214366, upload-time = "2025-05-23T11:38:43.551Z" }, + { url = "https://files.pythonhosted.org/packages/54/aa/9cbeade19b7e8e853e7ffc261df885d66bf3a782c71cba06c17df271f9e6/coverage-7.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:86a323a275e9e44cdf228af9b71c5030861d4d2610886ab920d9945672a81223", size = 215165, upload-time = "2025-05-23T11:38:45.148Z" }, + { url = "https://files.pythonhosted.org/packages/c4/73/e2528bf1237d2448f882bbebaec5c3500ef07301816c5c63464b9da4d88a/coverage-7.8.2-cp312-cp312-win_arm64.whl", hash = "sha256:820157de3a589e992689ffcda8639fbabb313b323d26388d02e154164c57b07f", size = 213548, upload-time = "2025-05-23T11:38:46.74Z" }, + { url = "https://files.pythonhosted.org/packages/1a/93/eb6400a745ad3b265bac36e8077fdffcf0268bdbbb6c02b7220b624c9b31/coverage-7.8.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ea561010914ec1c26ab4188aef8b1567272ef6de096312716f90e5baa79ef8ca", size = 211898, upload-time = "2025-05-23T11:38:49.066Z" }, + { url = "https://files.pythonhosted.org/packages/1b/7c/bdbf113f92683024406a1cd226a199e4200a2001fc85d6a6e7e299e60253/coverage-7.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:cb86337a4fcdd0e598ff2caeb513ac604d2f3da6d53df2c8e368e07ee38e277d", size = 212171, upload-time = "2025-05-23T11:38:51.207Z" }, + { url = "https://files.pythonhosted.org/packages/91/22/594513f9541a6b88eb0dba4d5da7d71596dadef6b17a12dc2c0e859818a9/coverage-7.8.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26a4636ddb666971345541b59899e969f3b301143dd86b0ddbb570bd591f1e85", size = 245564, upload-time = "2025-05-23T11:38:52.857Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f4/2860fd6abeebd9f2efcfe0fd376226938f22afc80c1943f363cd3c28421f/coverage-7.8.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5040536cf9b13fb033f76bcb5e1e5cb3b57c4807fef37db9e0ed129c6a094257", size = 242719, upload-time = "2025-05-23T11:38:54.529Z" }, + { url = "https://files.pythonhosted.org/packages/89/60/f5f50f61b6332451520e6cdc2401700c48310c64bc2dd34027a47d6ab4ca/coverage-7.8.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc67994df9bcd7e0150a47ef41278b9e0a0ea187caba72414b71dc590b99a108", size = 244634, upload-time = "2025-05-23T11:38:57.326Z" }, + { url = "https://files.pythonhosted.org/packages/3b/70/7f4e919039ab7d944276c446b603eea84da29ebcf20984fb1fdf6e602028/coverage-7.8.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e6c86888fd076d9e0fe848af0a2142bf606044dc5ceee0aa9eddb56e26895a0", size = 244824, upload-time = "2025-05-23T11:38:59.421Z" }, + { url = "https://files.pythonhosted.org/packages/26/45/36297a4c0cea4de2b2c442fe32f60c3991056c59cdc3cdd5346fbb995c97/coverage-7.8.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:684ca9f58119b8e26bef860db33524ae0365601492e86ba0b71d513f525e7050", size = 
242872, upload-time = "2025-05-23T11:39:01.049Z" }, + { url = "https://files.pythonhosted.org/packages/a4/71/e041f1b9420f7b786b1367fa2a375703889ef376e0d48de9f5723fb35f11/coverage-7.8.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8165584ddedb49204c4e18da083913bdf6a982bfb558632a79bdaadcdafd0d48", size = 244179, upload-time = "2025-05-23T11:39:02.709Z" }, + { url = "https://files.pythonhosted.org/packages/bd/db/3c2bf49bdc9de76acf2491fc03130c4ffc51469ce2f6889d2640eb563d77/coverage-7.8.2-cp313-cp313-win32.whl", hash = "sha256:34759ee2c65362163699cc917bdb2a54114dd06d19bab860725f94ef45a3d9b7", size = 214393, upload-time = "2025-05-23T11:39:05.457Z" }, + { url = "https://files.pythonhosted.org/packages/c6/dc/947e75d47ebbb4b02d8babb1fad4ad381410d5bc9da7cfca80b7565ef401/coverage-7.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:2f9bc608fbafaee40eb60a9a53dbfb90f53cc66d3d32c2849dc27cf5638a21e3", size = 215194, upload-time = "2025-05-23T11:39:07.171Z" }, + { url = "https://files.pythonhosted.org/packages/90/31/a980f7df8a37eaf0dc60f932507fda9656b3a03f0abf188474a0ea188d6d/coverage-7.8.2-cp313-cp313-win_arm64.whl", hash = "sha256:9fe449ee461a3b0c7105690419d0b0aba1232f4ff6d120a9e241e58a556733f7", size = 213580, upload-time = "2025-05-23T11:39:08.862Z" }, + { url = "https://files.pythonhosted.org/packages/8a/6a/25a37dd90f6c95f59355629417ebcb74e1c34e38bb1eddf6ca9b38b0fc53/coverage-7.8.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8369a7c8ef66bded2b6484053749ff220dbf83cba84f3398c84c51a6f748a008", size = 212734, upload-time = "2025-05-23T11:39:11.109Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/8b/3a728b3118988725f40950931abb09cd7f43b3c740f4640a59f1db60e372/coverage-7.8.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:159b81df53a5fcbc7d45dae3adad554fdbde9829a994e15227b3f9d816d00b36", size = 212959, upload-time = "2025-05-23T11:39:12.751Z" }, + { url = "https://files.pythonhosted.org/packages/53/3c/212d94e6add3a3c3f412d664aee452045ca17a066def8b9421673e9482c4/coverage-7.8.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6fcbbd35a96192d042c691c9e0c49ef54bd7ed865846a3c9d624c30bb67ce46", size = 257024, upload-time = "2025-05-23T11:39:15.569Z" }, + { url = "https://files.pythonhosted.org/packages/a4/40/afc03f0883b1e51bbe804707aae62e29c4e8c8bbc365c75e3e4ddeee9ead/coverage-7.8.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05364b9cc82f138cc86128dc4e2e1251c2981a2218bfcd556fe6b0fbaa3501be", size = 252867, upload-time = "2025-05-23T11:39:17.64Z" }, + { url = "https://files.pythonhosted.org/packages/18/a2/3699190e927b9439c6ded4998941a3c1d6fa99e14cb28d8536729537e307/coverage-7.8.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46d532db4e5ff3979ce47d18e2fe8ecad283eeb7367726da0e5ef88e4fe64740", size = 255096, upload-time = "2025-05-23T11:39:19.328Z" }, + { url = "https://files.pythonhosted.org/packages/b4/06/16e3598b9466456b718eb3e789457d1a5b8bfb22e23b6e8bbc307df5daf0/coverage-7.8.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4000a31c34932e7e4fa0381a3d6deb43dc0c8f458e3e7ea6502e6238e10be625", size = 256276, upload-time = "2025-05-23T11:39:21.077Z" }, + { url = 
"https://files.pythonhosted.org/packages/a7/d5/4b5a120d5d0223050a53d2783c049c311eea1709fa9de12d1c358e18b707/coverage-7.8.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:43ff5033d657cd51f83015c3b7a443287250dc14e69910577c3e03bd2e06f27b", size = 254478, upload-time = "2025-05-23T11:39:22.838Z" }, + { url = "https://files.pythonhosted.org/packages/ba/85/f9ecdb910ecdb282b121bfcaa32fa8ee8cbd7699f83330ee13ff9bbf1a85/coverage-7.8.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:94316e13f0981cbbba132c1f9f365cac1d26716aaac130866ca812006f662199", size = 255255, upload-time = "2025-05-23T11:39:24.644Z" }, + { url = "https://files.pythonhosted.org/packages/50/63/2d624ac7d7ccd4ebbd3c6a9eba9d7fc4491a1226071360d59dd84928ccb2/coverage-7.8.2-cp313-cp313t-win32.whl", hash = "sha256:3f5673888d3676d0a745c3d0e16da338c5eea300cb1f4ada9c872981265e76d8", size = 215109, upload-time = "2025-05-23T11:39:26.722Z" }, + { url = "https://files.pythonhosted.org/packages/22/5e/7053b71462e970e869111c1853afd642212568a350eba796deefdfbd0770/coverage-7.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:2c08b05ee8d7861e45dc5a2cc4195c8c66dca5ac613144eb6ebeaff2d502e73d", size = 216268, upload-time = "2025-05-23T11:39:28.429Z" }, + { url = "https://files.pythonhosted.org/packages/07/69/afa41aa34147655543dbe96994f8a246daf94b361ccf5edfd5df62ce066a/coverage-7.8.2-cp313-cp313t-win_arm64.whl", hash = "sha256:1e1448bb72b387755e1ff3ef1268a06617afd94188164960dba8d0245a46004b", size = 214071, upload-time = "2025-05-23T11:39:30.55Z" }, + { url = "https://files.pythonhosted.org/packages/71/1e/388267ad9c6aa126438acc1ceafede3bb746afa9872e3ec5f0691b7d5efa/coverage-7.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:496948261eaac5ac9cf43f5d0a9f6eb7a6d4cb3bedb2c5d294138142f5c18f2a", size = 211566, upload-time = "2025-05-23T11:39:32.333Z" }, + { url = "https://files.pythonhosted.org/packages/8f/a5/acc03e5cf0bba6357f5e7c676343de40fbf431bb1e115fbebf24b2f7f65e/coverage-7.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eacd2de0d30871eff893bab0b67840a96445edcb3c8fd915e6b11ac4b2f3fa6d", size = 211996, upload-time = "2025-05-23T11:39:34.512Z" }, + { url = "https://files.pythonhosted.org/packages/5b/a2/0fc0a9f6b7c24fa4f1d7210d782c38cb0d5e692666c36eaeae9a441b6755/coverage-7.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b039ffddc99ad65d5078ef300e0c7eed08c270dc26570440e3ef18beb816c1ca", size = 240741, upload-time = "2025-05-23T11:39:36.252Z" }, + { url = "https://files.pythonhosted.org/packages/e6/da/1c6ba2cf259710eed8916d4fd201dccc6be7380ad2b3b9f63ece3285d809/coverage-7.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e49824808d4375ede9dd84e9961a59c47f9113039f1a525e6be170aa4f5c34d", size = 238672, upload-time = "2025-05-23T11:39:38.03Z" }, + { url = "https://files.pythonhosted.org/packages/ac/51/c8fae0dc3ca421e6e2509503696f910ff333258db672800c3bdef256265a/coverage-7.8.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b069938961dfad881dc2f8d02b47645cd2f455d3809ba92a8a687bf513839787", size = 239769, upload-time = "2025-05-23T11:39:40.24Z" }, + { url = "https://files.pythonhosted.org/packages/59/8e/b97042ae92c59f40be0c989df090027377ba53f2d6cef73c9ca7685c26a6/coverage-7.8.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:de77c3ba8bb686d1c411e78ee1b97e6e0b963fb98b1637658dd9ad2c875cf9d7", size = 239555, 
upload-time = "2025-05-23T11:39:42.3Z" }, + { url = "https://files.pythonhosted.org/packages/47/35/b8893e682d6e96b1db2af5997fc13ef62219426fb17259d6844c693c5e00/coverage-7.8.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1676628065a498943bd3f64f099bb573e08cf1bc6088bbe33cf4424e0876f4b3", size = 237768, upload-time = "2025-05-23T11:39:44.069Z" }, + { url = "https://files.pythonhosted.org/packages/03/6c/023b0b9a764cb52d6243a4591dcb53c4caf4d7340445113a1f452bb80591/coverage-7.8.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8e1a26e7e50076e35f7afafde570ca2b4d7900a491174ca357d29dece5aacee7", size = 238757, upload-time = "2025-05-23T11:39:46.195Z" }, + { url = "https://files.pythonhosted.org/packages/03/ed/3af7e4d721bd61a8df7de6de9e8a4271e67f3d9e086454558fd9f48eb4f6/coverage-7.8.2-cp39-cp39-win32.whl", hash = "sha256:6782a12bf76fa61ad9350d5a6ef5f3f020b57f5e6305cbc663803f2ebd0f270a", size = 214166, upload-time = "2025-05-23T11:39:47.934Z" }, + { url = "https://files.pythonhosted.org/packages/9d/30/ee774b626773750dc6128354884652507df3c59d6aa8431526107e595227/coverage-7.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:1efa4166ba75ccefd647f2d78b64f53f14fb82622bc94c5a5cb0a622f50f1c9e", size = 215050, upload-time = "2025-05-23T11:39:50.252Z" }, + { url = "https://files.pythonhosted.org/packages/69/2f/572b29496d8234e4a7773200dd835a0d32d9e171f2d974f3fe04a9dbc271/coverage-7.8.2-pp39.pp310.pp311-none-any.whl", hash = "sha256:ec455eedf3ba0bbdf8f5a570012617eb305c63cb9f03428d39bf544cb2b94837", size = 203636, upload-time = "2025-05-23T11:39:52.002Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1a/0b9c32220ad694d66062f571cc5cedfa9997b64a591e8a500bb63de1bd40/coverage-7.8.2-py3-none-any.whl", hash = 
"sha256:726f32ee3713f7359696331a18daf0c3b3a70bb0ae71141b9d3c52be7c595e32", size = 203623, upload-time = "2025-05-23T11:39:53.846Z" }, +] + +[[package]] +name = "darglint" +version = "1.8.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d4/2c/86e8549e349388c18ca8a4ff8661bb5347da550f598656d32a98eaaf91cc/darglint-1.8.1.tar.gz", hash = "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da", size = 74435, upload-time = "2021-10-18T03:40:37.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/28/85d1e0396d64422c5218d68e5cdcc53153aa8a2c83c7dbc3ee1502adf3a1/darglint-1.8.1-py3-none-any.whl", hash = "sha256:5ae11c259c17b0701618a20c3da343a3eb98b3bc4b5a83d31cdd94f5ebdced8d", size = 120767, upload-time = "2021-10-18T03:40:35.034Z" }, +] + +[[package]] +name = "docutils" +version = "0.21.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444, upload-time = "2024-04-23T18:57:18.24Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408, upload-time = "2024-04-23T18:57:14.835Z" }, +] + +[[package]] +name = "eradicate" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/7a/e1/665186aedea2d6ebf0415cf97c0629c8123a721e7afc417deeade5598215/eradicate-2.3.0.tar.gz", hash = "sha256:06df115be3b87d0fc1c483db22a2ebb12bcf40585722810d809cc770f5031c37", size = 8536, upload-time = "2023-06-09T06:31:41.814Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/c2/533e1338429aeba1f089566a2314d69d3e78ab57a73006f16a923bf2b24c/eradicate-2.3.0-py3-none-any.whl", hash = "sha256:2b29b3dd27171f209e4ddd8204b70c02f0682ae95eecb353f10e8d72b149c63e", size = 6113, upload-time = "2023-06-09T06:31:40.209Z" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, +] + +[[package]] +name = "faker" +version = "37.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tzdata" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/97/4b/5354912eaff922876323f2d07e21408b10867f3295d5f917748341cb6f53/faker-37.3.0.tar.gz", hash = 
"sha256:77b79e7a2228d57175133af0bbcdd26dc623df81db390ee52f5104d46c010f2f", size = 1901376, upload-time = "2025-05-14T15:24:18.039Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/99/045b2dae19a01b9fbb23b9971bc04f4ef808e7f3a213d08c81067304a210/faker-37.3.0-py3-none-any.whl", hash = "sha256:48c94daa16a432f2d2bc803c7ff602509699fca228d13e97e379cd860a7e216e", size = 1942203, upload-time = "2025-05-14T15:24:16.159Z" }, +] + +[[package]] +name = "flake8" +version = "7.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mccabe" }, + { name = "pycodestyle" }, + { name = "pyflakes" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e7/c4/5842fc9fc94584c455543540af62fd9900faade32511fab650e9891ec225/flake8-7.2.0.tar.gz", hash = "sha256:fa558ae3f6f7dbf2b4f22663e5343b6b6023620461f8d4ff2019ef4b5ee70426", size = 48177, upload-time = "2025-03-29T20:08:39.329Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/5c/0627be4c9976d56b1217cb5187b7504e7fd7d3503f8bfd312a04077bd4f7/flake8-7.2.0-py2.py3-none-any.whl", hash = "sha256:93b92ba5bdb60754a6da14fa3b93a9361fd00a59632ada61fd7b130436c40343", size = 57786, upload-time = "2025-03-29T20:08:37.902Z" }, +] + +[[package]] +name = "flake8-bandit" +version = "4.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "bandit", marker = "python_full_version < '3.10'" }, + { name = "flake8", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/77/1c/4f66a7a52a246d6c64312b5c40da3af3630cd60b27af81b137796af3c0bc/flake8_bandit-4.1.1.tar.gz", hash = "sha256:068e09287189cbfd7f986e92605adea2067630b75380c6b5733dab7d87f9a84e", size = 5403, 
upload-time = "2022-08-29T13:48:41.225Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/5f/55bab0ac89f9ad9f4c6e38087faa80c252daec4ccb7776b4dac216ca9e3f/flake8_bandit-4.1.1-py3-none-any.whl", hash = "sha256:4c8a53eb48f23d4ef1e59293657181a3c989d0077c9952717e98a0eace43e06d", size = 4828, upload-time = "2022-08-29T13:48:39.737Z" }, +] + +[[package]] +name = "flake8-broken-line" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "flake8", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/30/5e/eca08446205afb79e74b6af8e227f06f0b1a26ae892708adbc4e65ccaa86/flake8_broken_line-1.0.0.tar.gz", hash = "sha256:e2c6a17f8d9a129e99c1320fce89b33843e2963871025c4c2bb7b8b8d8732a85", size = 3458, upload-time = "2023-05-31T10:09:11.716Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/ff/57d0101933527b5202cc9f80bc15aa85b207916c722a00e7adde0e33f413/flake8_broken_line-1.0.0-py3-none-any.whl", hash = "sha256:96c964336024a5030dc536a9f6fb02aa679e2d2a6b35b80a558b5136c35832a9", size = 4202, upload-time = "2023-05-31T10:09:10.027Z" }, +] + +[[package]] +name = "flake8-bugbear" +version = "24.12.12" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs", marker = "python_full_version < '3.10'" }, + { name = "flake8", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c7/25/48ba712ff589b0149f21135234f9bb45c14d6689acc6151b5e2ff8ac2ae9/flake8_bugbear-24.12.12.tar.gz", hash = "sha256:46273cef0a6b6ff48ca2d69e472f41420a42a46e24b2a8972e4f0d6733d12a64", size = 82907, upload-time = "2024-12-12T16:49:26.307Z" } +wheels = [ + { 
url = "https://files.pythonhosted.org/packages/b9/21/0a875f75fbe4008bd171e2fefa413536258fe6b4cfaaa087986de74588f4/flake8_bugbear-24.12.12-py3-none-any.whl", hash = "sha256:1b6967436f65ca22a42e5373aaa6f2d87966ade9aa38d4baf2a1be550767545e", size = 36664, upload-time = "2024-12-12T16:49:23.584Z" }, +] + +[[package]] +name = "flake8-commas" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "flake8", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0e/83/814bc8eb02b8883bc004384a1fb8b1f45b4a0b892e579fec7c80a9368526/flake8-commas-2.1.0.tar.gz", hash = "sha256:940441ab8ee544df564ae3b3f49f20462d75d5c7cac2463e0b27436e2050f263", size = 8484, upload-time = "2021-10-13T19:25:41.6Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/0d/41895badcdbbe84893b95c114d5bd4345d69c9d5645a42857f1ccb84d556/flake8_commas-2.1.0-py2.py3-none-any.whl", hash = "sha256:ebb96c31e01d0ef1d0685a21f3f0e2f8153a0381430e748bf0bbbb5d5b453d54", size = 7591, upload-time = "2021-10-13T19:25:39.472Z" }, +] + +[[package]] +name = "flake8-comprehensions" +version = "3.16.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "flake8", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6d/7d/7ffaa876ca5b330fc244287208dce1d12515b88a69488ea90ab58c94501d/flake8_comprehensions-3.16.0.tar.gz", hash = "sha256:9cbf789905a8f03f9d350fb82b17b264d9a16c7ce3542b2a7b871ef568cafabe", size = 12991, upload-time = "2024-10-27T21:51:18.029Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/5f/bf/0cf8d3c9a233620840f209490c4907d7d416d066557396ebda678c58de09/flake8_comprehensions-3.16.0-py3-none-any.whl", hash = "sha256:7c1eadc9d22e765f39857798febe7766b4d9c519793c6c149e3e13bf99693f70", size = 8169, upload-time = "2024-10-27T21:51:16.464Z" }, +] + +[[package]] +name = "flake8-debugger" +version = "4.1.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "flake8", marker = "python_full_version < '3.10'" }, + { name = "pycodestyle", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1f/1e/f9bdb98f3df5dceaa2287a8fb5801a22681dbd677a8759704083357e27c4/flake8-debugger-4.1.2.tar.gz", hash = "sha256:52b002560941e36d9bf806fca2523dc7fb8560a295d5f1a6e15ac2ded7a73840", size = 7801, upload-time = "2022-04-30T16:50:55.71Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/6b/8e5d248949798644b3d8e5f598ed5d1da82d8f157d4bafd78f45247f1690/flake8_debugger-4.1.2-py3-none-any.whl", hash = "sha256:0a5e55aeddcc81da631ad9c8c366e7318998f83ff00985a49e6b3ecf61e571bf", size = 7909, upload-time = "2022-04-30T16:50:57.294Z" }, +] + +[[package]] +name = "flake8-docstrings" +version = "1.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "flake8", marker = "python_full_version < '3.10'" }, + { name = "pydocstyle", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/93/24/f839e3a06e18f4643ccb81370909a497297909f15106e6af2fecdef46894/flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af", size = 5995, upload-time = "2023-01-25T14:27:13.903Z" } +wheels = 
[ + { url = "https://files.pythonhosted.org/packages/3f/7d/76a278fa43250441ed9300c344f889c7fb1817080c8fb8996b840bf421c2/flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75", size = 4994, upload-time = "2023-01-25T14:27:12.32Z" }, +] + +[[package]] +name = "flake8-eradicate" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs", marker = "python_full_version < '3.10'" }, + { name = "eradicate", marker = "python_full_version < '3.10'" }, + { name = "flake8", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/72/a3975dfa4287396e9fb8fc2b4ee94a80d0809babbf92abed5af9c8e29c95/flake8_eradicate-1.5.0.tar.gz", hash = "sha256:aee636cb9ecb5594a7cd92d67ad73eb69909e5cc7bd81710cf9d00970f3983a6", size = 4508, upload-time = "2023-05-31T09:57:15.484Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e2/a9/1319b9e5eeb7d948f6db0b0ed4209bae0ec12d30ab3ee43a0ac1d8ce455f/flake8_eradicate-1.5.0-py3-none-any.whl", hash = "sha256:18acc922ad7de623f5247c7d5595da068525ec5437dd53b22ec2259b96ce9d22", size = 5144, upload-time = "2023-05-31T09:57:13.589Z" }, +] + +[[package]] +name = "flake8-isort" +version = "6.1.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "flake8", marker = "python_full_version < '3.10'" }, + { name = "isort", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7c/ea/2f2662d4fefa6ab335c7119cb28e5bc57c935a86a69a7f72df3ea5fe7b2c/flake8_isort-6.1.2.tar.gz", hash = "sha256:9d0452acdf0e1cd6f2d6848e3605e66b54d920e73471fb4744eef0f93df62d5d", size = 
17756, upload-time = "2025-01-29T12:29:25.753Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/10/295e982874f2a94f309baf7c45f852a191c87d59bd846b1701332303783f/flake8_isort-6.1.2-py3-none-any.whl", hash = "sha256:549197dedf0273502fb74f04c080beed9e62a7eb70244610413d27052e78bd3b", size = 18385, upload-time = "2025-01-29T12:29:23.46Z" }, +] + +[[package]] +name = "flake8-quotes" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "flake8", marker = "python_full_version < '3.10'" }, + { name = "setuptools", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dd/57/a173e3eb86072b7ee77650aca496b15d6886367d257f58ea9de5276e330a/flake8-quotes-3.4.0.tar.gz", hash = "sha256:aad8492fb710a2d3eabe68c5f86a1428de650c8484127e14c43d0504ba30276c", size = 14107, upload-time = "2024-02-10T21:58:22.357Z" } + +[[package]] +name = "flake8-rst-docstrings" +version = "0.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "flake8", marker = "python_full_version < '3.10'" }, + { name = "pygments", marker = "python_full_version < '3.10'" }, + { name = "restructuredtext-lint", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/18/d6/a3e5f86f984d6d8caa1705deffdae84c710e594ab5c1985e26c5e1bb05db/flake8_rst_docstrings-0.3.1.tar.gz", hash = "sha256:26dcc1338caf985990677696a8a6a274f73a0c6845b85f567befd3b648db78e2", size = 12867, upload-time = "2025-04-29T11:34:56.437Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/a7/ef9a2b35987d4d4b2b6213891915d0d7242ccc034861dec63540b81f3a13/flake8_rst_docstrings-0.3.1-py3-none-any.whl", 
hash = "sha256:ed831afca7ee47851e2162d5fa726b823b446fd46085c2164d7979ae5d9a96d7", size = 11049, upload-time = "2025-04-29T11:34:54.861Z" }, +] + +[[package]] +name = "flake8-string-format" +version = "0.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "flake8", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/68/db/500e114a9ee115b03a21a2581c227fd932a0f50c4ae8fee514ef9a373cf4/flake8-string-format-0.3.0.tar.gz", hash = "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2", size = 6495, upload-time = "2020-02-16T15:27:51.045Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/22/e5f4ccc41dda8db61cf3bb7a93549f9ae8e1dd10547b3d71cc8483a0b437/flake8_string_format-0.3.0-py2.py3-none-any.whl", hash = "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af", size = 7266, upload-time = "2020-02-16T15:27:49.327Z" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, +] + +[[package]] +name = "isort" +version = "6.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b8/21/1e2a441f74a653a144224d7d21afe8f4169e6c7c20bb13aec3a2dc3815e0/isort-6.0.1.tar.gz", hash = "sha256:1cb5df28dfbc742e490c5e41bad6da41b805b0a8be7bc93cd0fb2a8a890ac450", size = 821955, upload-time = "2025-02-26T21:13:16.955Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/11/114d0a5f4dabbdcedc1125dee0888514c3c3b16d3e9facad87ed96fad97c/isort-6.0.1-py3-none-any.whl", hash = "sha256:2dc5d7f65c9678d94c88dfc29161a320eec67328bc97aad576874cb4be1e9615", size = 94186, upload-time = "2025-02-26T21:13:14.911Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl", marker = "python_full_version < '3.10'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, +] + +[[package]] +name = "mccabe" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/ff/0ffefdcac38932a54d2b5eed4e0ba8a408f215002cd178ad1df0f2806ff8/mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", size = 9658, upload-time = "2022-01-24T01:14:51.113Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/1a/1f68f9ba0c207934b35b86a8ca3aad8395a3d6dd7921c0686e23853ff5a9/mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e", size = 7350, upload-time = "2022-01-24T01:14:49.62Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "mypy" +version = "1.15.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mypy-extensions" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ce/43/d5e49a86afa64bd3839ea0d5b9c7103487007d728e1293f52525d6d5486a/mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43", size = 3239717, upload-time = "2025-02-05T03:50:34.655Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/f8/65a7ce8d0e09b6329ad0c8d40330d100ea343bd4dd04c4f8ae26462d0a17/mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13", size = 10738433, upload-time = "2025-02-05T03:49:29.145Z" }, + { url = "https://files.pythonhosted.org/packages/b4/95/9c0ecb8eacfe048583706249439ff52105b3f552ea9c4024166c03224270/mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559", size = 9861472, upload-time = "2025-02-05T03:49:16.986Z" }, + { url = "https://files.pythonhosted.org/packages/84/09/9ec95e982e282e20c0d5407bc65031dfd0f0f8ecc66b69538296e06fcbee/mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b", size = 11611424, upload-time = "2025-02-05T03:49:46.908Z" }, + { url = "https://files.pythonhosted.org/packages/78/13/f7d14e55865036a1e6a0a69580c240f43bc1f37407fe9235c0d4ef25ffb0/mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3", size = 12365450, upload-time = "2025-02-05T03:50:05.89Z" }, + { url = "https://files.pythonhosted.org/packages/48/e1/301a73852d40c241e915ac6d7bcd7fedd47d519246db2d7b86b9d7e7a0cb/mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b", size = 12551765, upload-time = "2025-02-05T03:49:33.56Z" }, + { url = "https://files.pythonhosted.org/packages/77/ba/c37bc323ae5fe7f3f15a28e06ab012cd0b7552886118943e90b15af31195/mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828", size = 9274701, upload-time = "2025-02-05T03:49:38.981Z" }, + { url = "https://files.pythonhosted.org/packages/03/bc/f6339726c627bd7ca1ce0fa56c9ae2d0144604a319e0e339bdadafbbb599/mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f", size = 10662338, upload-time = "2025-02-05T03:50:17.287Z" }, + { url = "https://files.pythonhosted.org/packages/e2/90/8dcf506ca1a09b0d17555cc00cd69aee402c203911410136cd716559efe7/mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5", size = 9787540, upload-time = "2025-02-05T03:49:51.21Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/05/a10f9479681e5da09ef2f9426f650d7b550d4bafbef683b69aad1ba87457/mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e", size = 11538051, upload-time = "2025-02-05T03:50:20.885Z" }, + { url = "https://files.pythonhosted.org/packages/e9/9a/1f7d18b30edd57441a6411fcbc0c6869448d1a4bacbaee60656ac0fc29c8/mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c", size = 12286751, upload-time = "2025-02-05T03:49:42.408Z" }, + { url = "https://files.pythonhosted.org/packages/72/af/19ff499b6f1dafcaf56f9881f7a965ac2f474f69f6f618b5175b044299f5/mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f", size = 12421783, upload-time = "2025-02-05T03:49:07.707Z" }, + { url = "https://files.pythonhosted.org/packages/96/39/11b57431a1f686c1aed54bf794870efe0f6aeca11aca281a0bd87a5ad42c/mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f", size = 9265618, upload-time = "2025-02-05T03:49:54.581Z" }, + { url = "https://files.pythonhosted.org/packages/98/3a/03c74331c5eb8bd025734e04c9840532226775c47a2c39b56a0c8d4f128d/mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd", size = 10793981, upload-time = "2025-02-05T03:50:28.25Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/1a/41759b18f2cfd568848a37c89030aeb03534411eef981df621d8fad08a1d/mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f", size = 9749175, upload-time = "2025-02-05T03:50:13.411Z" }, + { url = "https://files.pythonhosted.org/packages/12/7e/873481abf1ef112c582db832740f4c11b2bfa510e829d6da29b0ab8c3f9c/mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464", size = 11455675, upload-time = "2025-02-05T03:50:31.421Z" }, + { url = "https://files.pythonhosted.org/packages/b3/d0/92ae4cde706923a2d3f2d6c39629134063ff64b9dedca9c1388363da072d/mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee", size = 12410020, upload-time = "2025-02-05T03:48:48.705Z" }, + { url = "https://files.pythonhosted.org/packages/46/8b/df49974b337cce35f828ba6fda228152d6db45fed4c86ba56ffe442434fd/mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e", size = 12498582, upload-time = "2025-02-05T03:49:03.628Z" }, + { url = "https://files.pythonhosted.org/packages/13/50/da5203fcf6c53044a0b699939f31075c45ae8a4cadf538a9069b165c1050/mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22", size = 9366614, upload-time = "2025-02-05T03:50:00.313Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/9b/fd2e05d6ffff24d912f150b87db9e364fa8282045c875654ce7e32fffa66/mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445", size = 10788592, upload-time = "2025-02-05T03:48:55.789Z" }, + { url = "https://files.pythonhosted.org/packages/74/37/b246d711c28a03ead1fd906bbc7106659aed7c089d55fe40dd58db812628/mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d", size = 9753611, upload-time = "2025-02-05T03:48:44.581Z" }, + { url = "https://files.pythonhosted.org/packages/a6/ac/395808a92e10cfdac8003c3de9a2ab6dc7cde6c0d2a4df3df1b815ffd067/mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5", size = 11438443, upload-time = "2025-02-05T03:49:25.514Z" }, + { url = "https://files.pythonhosted.org/packages/d2/8b/801aa06445d2de3895f59e476f38f3f8d610ef5d6908245f07d002676cbf/mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036", size = 12402541, upload-time = "2025-02-05T03:49:57.623Z" }, + { url = "https://files.pythonhosted.org/packages/c7/67/5a4268782eb77344cc613a4cf23540928e41f018a9a1ec4c6882baf20ab8/mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357", size = 12494348, upload-time = "2025-02-05T03:48:52.361Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/3e/57bb447f7bbbfaabf1712d96f9df142624a386d98fb026a761532526057e/mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf", size = 9373648, upload-time = "2025-02-05T03:49:11.395Z" }, + { url = "https://files.pythonhosted.org/packages/5a/fa/79cf41a55b682794abe71372151dbbf856e3008f6767057229e6649d294a/mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078", size = 10737129, upload-time = "2025-02-05T03:50:24.509Z" }, + { url = "https://files.pythonhosted.org/packages/d3/33/dd8feb2597d648de29e3da0a8bf4e1afbda472964d2a4a0052203a6f3594/mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba", size = 9856335, upload-time = "2025-02-05T03:49:36.398Z" }, + { url = "https://files.pythonhosted.org/packages/e4/b5/74508959c1b06b96674b364ffeb7ae5802646b32929b7701fc6b18447592/mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5", size = 11611935, upload-time = "2025-02-05T03:49:14.154Z" }, + { url = "https://files.pythonhosted.org/packages/6c/53/da61b9d9973efcd6507183fdad96606996191657fe79701b2c818714d573/mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b", size = 12365827, upload-time = "2025-02-05T03:48:59.458Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/72/965bd9ee89540c79a25778cc080c7e6ef40aa1eeac4d52cec7eae6eb5228/mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2", size = 12541924, upload-time = "2025-02-05T03:50:03.12Z" }, + { url = "https://files.pythonhosted.org/packages/46/d0/f41645c2eb263e6c77ada7d76f894c580c9ddb20d77f0c24d34273a4dab2/mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980", size = 9271176, upload-time = "2025-02-05T03:50:10.86Z" }, + { url = "https://files.pythonhosted.org/packages/09/4e/a7d65c7322c510de2c409ff3828b03354a7c43f5a8ed458a7a131b41c7b9/mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e", size = 2221777, upload-time = "2025-02-05T03:50:08.348Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, +] + +[[package]] +name = "pbr" +version = "6.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "setuptools", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/01/d2/510cc0d218e753ba62a1bc1434651db3cd797a9716a0a66cc714cb4f0935/pbr-6.1.1.tar.gz", 
hash = "sha256:93ea72ce6989eb2eed99d0f75721474f69ad88128afdef5ac377eb797c4bf76b", size = 125702, upload-time = "2025-02-04T14:28:06.514Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/ac/684d71315abc7b1214d59304e23a982472967f6bf4bde5a98f1503f648dc/pbr-6.1.1-py2.py3-none-any.whl", hash = "sha256:38d4daea5d9fa63b3f626131b9d34947fd0c8be9b05a29276870580050a25a76", size = 108997, upload-time = "2025-02-04T14:28:03.168Z" }, +] + +[[package]] +name = "pep8-naming" +version = "0.13.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "flake8", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/c0/0db8b2867395a9a137e86af8bdf5a566e41d9c6453e509cd3042419ae29e/pep8-naming-0.13.3.tar.gz", hash = "sha256:1705f046dfcd851378aac3be1cd1551c7c1e5ff363bacad707d43007877fa971", size = 16129, upload-time = "2022-12-19T20:45:27.158Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4f/48/9533518e0394fb858ac2b4b55fe18f24aa33c87c943f691336ec842d9728/pep8_naming-0.13.3-py3-none-any.whl", hash = "sha256:1a86b8c71a03337c97181917e2b472f0f5e4ccb06844a0d6f0a33522549e7a80", size = 8490, upload-time = "2022-12-19T20:45:25.132Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.3.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362, upload-time = "2025-05-07T22:47:42.121Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567, upload-time = "2025-05-07T22:47:40.376Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pycodestyle" +version = "2.13.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/04/6e/1f4a62078e4d95d82367f24e685aef3a672abfd27d1a868068fed4ed2254/pycodestyle-2.13.0.tar.gz", hash = "sha256:c8415bf09abe81d9c7f872502a6eee881fbe85d8763dd5b9924bb0a01d67efae", size = 39312, upload-time = "2025-03-29T17:33:30.669Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/be/b00116df1bfb3e0bb5b45e29d604799f7b91dd861637e4d448b4e09e6a3e/pycodestyle-2.13.0-py2.py3-none-any.whl", hash = "sha256:35863c5974a271c7a726ed228a14a4f6daf49df369d8c50cd9a6f58a5e143ba9", size = 31424, upload-time = "2025-03-29T17:33:29.405Z" }, +] + 
+[[package]] +name = "pydocstyle" +version = "6.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "snowballstemmer", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/5c/d5385ca59fd065e3c6a5fe19f9bc9d5ea7f2509fa8c9c22fb6b2031dd953/pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1", size = 36796, upload-time = "2023-01-17T20:29:19.838Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/ea/99ddefac41971acad68f14114f38261c1f27dac0b3ec529824ebc739bdaa/pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019", size = 38038, upload-time = "2023-01-17T20:29:18.094Z" }, +] + +[[package]] +name = "pyflakes" +version = "3.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/af/cc/1df338bd7ed1fa7c317081dcf29bf2f01266603b301e6858856d346a12b3/pyflakes-3.3.2.tar.gz", hash = "sha256:6dfd61d87b97fba5dcfaaf781171ac16be16453be6d816147989e7f6e6a9576b", size = 64175, upload-time = "2025-03-31T13:21:20.34Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/40/b293a4fa769f3b02ab9e387c707c4cbdc34f073f945de0386107d4e669e6/pyflakes-3.3.2-py2.py3-none-any.whl", hash = "sha256:5039c8339cbb1944045f4ee5466908906180f13cc99cc9949348d10f82a5c32a", size = 63164, upload-time = "2025-03-31T13:21:18.503Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581, upload-time = "2025-01-06T17:26:30.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293, upload-time = "2025-01-06T17:26:25.553Z" }, +] + +[[package]] +name = "pytest" +version = "8.3.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ae/3c/c9d525a414d506893f0cd8a8d0de7706446213181570cdbd766691164e40/pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845", size = 1450891, upload-time = "2025-03-02T12:54:54.503Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634, upload-time = "2025-03-02T12:54:52.069Z" }, +] + +[[package]] +name = "pytest-mock" +version = "3.14.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/71/28/67172c96ba684058a4d24ffe144d64783d2a270d0af0d9e792737bddc75c/pytest_mock-3.14.1.tar.gz", hash = "sha256:159e9edac4c451ce77a5cdb9fc5d1100708d2dd4ba3c3df572f14097351af80e", size = 33241, upload-time = "2025-05-26T13:58:45.167Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b2/05/77b60e520511c53d1c1ca75f1930c7dd8e971d0c4379b7f4b3f9644685ba/pytest_mock-3.14.1-py3-none-any.whl", hash = "sha256:178aefcd11307d874b4cd3100344e7e2d888d9791a6a1d9bfe90fbc1b74fd1d0", size = 9923, upload-time = "2025-05-26T13:58:43.487Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/88/2c/7bb1416c5620485aa793f2de31d3df393d3686aa8a8506d11e10e13c5baf/python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5", size = 39920, upload-time = "2025-03-25T10:14:56.835Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256, upload-time = "2025-03-25T10:14:55.034Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +wheels = 
[ + { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199, upload-time = "2024-08-06T20:31:40.178Z" }, + { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758, upload-time = "2024-08-06T20:31:42.173Z" }, + { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463, upload-time = "2024-08-06T20:31:44.263Z" }, + { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280, upload-time = "2024-08-06T20:31:50.199Z" }, + { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239, upload-time = "2024-08-06T20:31:52.292Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802, upload-time = "2024-08-06T20:31:53.836Z" }, + { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527, upload-time = "2024-08-06T20:31:55.565Z" }, + { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052, upload-time = "2024-08-06T20:31:56.914Z" }, + { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774, upload-time = "2024-08-06T20:31:58.304Z" }, + { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" }, + { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" }, + { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" }, + { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" }, + { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" }, + { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" }, + { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" }, + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, + { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, + { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, + { url = 
"https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, + { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, + { url = "https://files.pythonhosted.org/packages/65/d8/b7a1db13636d7fb7d4ff431593c510c8b8fca920ade06ca8ef20015493c5/PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d", size = 184777, upload-time = "2024-08-06T20:33:25.896Z" }, + { url = "https://files.pythonhosted.org/packages/0a/02/6ec546cd45143fdf9840b2c6be8d875116a64076218b61d68e12548e5839/PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f", size = 172318, upload-time = "2024-08-06T20:33:27.212Z" }, + { url = "https://files.pythonhosted.org/packages/0e/9a/8cc68be846c972bda34f6c2a93abb644fb2476f4dcc924d52175786932c9/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290", size = 720891, upload-time = "2024-08-06T20:33:28.974Z" }, + { url = "https://files.pythonhosted.org/packages/e9/6c/6e1b7f40181bc4805e2e07f4abc10a88ce4648e7e95ff1abe4ae4014a9b2/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12", size = 722614, upload-time = "2024-08-06T20:33:34.157Z" }, + { url = "https://files.pythonhosted.org/packages/3d/32/e7bd8535d22ea2874cef6a81021ba019474ace0d13a4819c2a4bce79bd6a/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19", size = 737360, upload-time = "2024-08-06T20:33:35.84Z" }, + { url = "https://files.pythonhosted.org/packages/d7/12/7322c1e30b9be969670b672573d45479edef72c9a0deac3bb2868f5d7469/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e", size = 699006, upload-time = "2024-08-06T20:33:37.501Z" }, + { url = "https://files.pythonhosted.org/packages/82/72/04fcad41ca56491995076630c3ec1e834be241664c0c09a64c9a2589b507/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725", size = 723577, upload-time = "2024-08-06T20:33:39.389Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/5e/46168b1f2757f1fcd442bc3029cd8767d88a98c9c05770d8b420948743bb/PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631", size = 144593, upload-time = "2024-08-06T20:33:46.63Z" }, + { url = "https://files.pythonhosted.org/packages/19/87/5124b1c1f2412bb95c59ec481eaf936cd32f0fe2a7b16b97b81c4c017a6a/PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8", size = 162312, upload-time = "2024-08-06T20:33:49.073Z" }, +] + +[[package]] +name = "requests" +version = "2.32.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218, upload-time = "2024-05-29T15:37:49.536Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928, upload-time = "2024-05-29T15:37:47.027Z" }, +] + +[[package]] +name = "requests-mock" +version = "1.12.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/92/32/587625f91f9a0a3d84688bf9cfc4b2480a7e8ec327cefd0ff2ac891fd2cf/requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401", size = 60901, upload-time = "2024-03-29T03:54:29.446Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/97/ec/889fbc557727da0c34a33850950310240f2040f3b1955175fdb2b36a8910/requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563", size = 27695, upload-time = "2024-03-29T03:54:27.64Z" }, +] + +[[package]] +name = "restructuredtext-lint" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/48/9c/6d8035cafa2d2d314f34e6cd9313a299de095b26e96f1c7312878f988eec/restructuredtext_lint-1.4.0.tar.gz", hash = "sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45", size = 16723, upload-time = "2022-02-24T05:51:10.907Z" } + +[[package]] +name = "rich" +version = "14.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py", marker = "python_full_version < '3.10'" }, + { name = "pygments", marker = "python_full_version < '3.10'" }, + { name = "typing-extensions", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/53/830aa4c3066a8ab0ae9a9955976fb770fe9c6102117c8ec4ab3ea62d89e8/rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725", size = 224078, upload-time = "2025-03-30T14:15:14.23Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229, upload-time = "2025-03-30T14:15:12.283Z" }, +] + +[[package]] +name = "setuptools" +version = "80.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, +] + +[[package]] +name = "snowballstemmer" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/75/a7/9810d872919697c9d01295633f5d574fb416d47e535f258272ca1f01f447/snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895", size = 105575, upload-time = "2025-05-09T16:34:51.843Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/78/3565d011c61f5a43488987ee32b6f3f656e7f107ac2782dd57bdd7d91d9a/snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064", size = 103274, upload-time = 
"2025-05-09T16:34:50.371Z" }, +] + +[[package]] +name = "stevedore" +version = "5.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pbr", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/28/3f/13cacea96900bbd31bb05c6b74135f85d15564fc583802be56976c940470/stevedore-5.4.1.tar.gz", hash = "sha256:3135b5ae50fe12816ef291baff420acb727fcd356106e3e9cbfa9e5985cd6f4b", size = 513858, upload-time = "2025-02-20T14:03:57.285Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/45/8c4ebc0c460e6ec38e62ab245ad3c7fc10b210116cea7c16d61602aa9558/stevedore-5.4.1-py3-none-any.whl", hash = "sha256:d10a31c7b86cba16c1f6e8d15416955fc797052351a56af15e608ad20811fcfe", size = 49533, upload-time = "2025-02-20T14:03:55.849Z" }, +] + +[[package]] +name = "tomli" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, + { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, + { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, + { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, + { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, + { 
url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, + { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, + { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, + { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, + { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, + { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, + { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, + { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, + { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" }, + { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, + { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, + { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" }, + { url = 
"https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" }, + { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, +] + +[[package]] +name = "types-requests" +version = "2.32.0.20250515" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/c1/cdc4f9b8cfd9130fbe6276db574f114541f4231fcc6fb29648289e6e3390/types_requests-2.32.0.20250515.tar.gz", hash = "sha256:09c8b63c11318cb2460813871aaa48b671002e59fda67ca909e9883777787581", size = 23012, upload-time = "2025-05-15T03:04:31.817Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/0f/68a997c73a129287785f418c1ebb6004f81e46b53b3caba88c0e03fcd04a/types_requests-2.32.0.20250515-py3-none-any.whl", hash = "sha256:f8eba93b3a892beee32643ff836993f15a785816acca21ea0ffa006f05ef0fb2", size = 20635, upload-time = "2025-05-15T03:04:30.5Z" }, +] + +[[package]] +name = "typesense" +source = { virtual = "." 
} +dependencies = [ + { name = "requests" }, +] + +[package.dev-dependencies] +dev = [ + { name = "black" }, + { name = "coverage" }, + { name = "faker" }, + { name = "flake8" }, + { name = "mypy" }, + { name = "pytest" }, + { name = "pytest-mock" }, + { name = "python-dotenv" }, + { name = "requests-mock" }, + { name = "types-requests" }, + { name = "wemake-python-styleguide", version = "0.19.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "wemake-python-styleguide", version = "1.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, +] + +[package.metadata] +requires-dist = [{ name = "requests" }] + +[package.metadata.requires-dev] +dev = [ + { name = "black" }, + { name = "coverage" }, + { name = "faker" }, + { name = "flake8" }, + { name = "mypy" }, + { name = "pytest" }, + { name = "pytest-mock" }, + { name = "python-dotenv" }, + { name = "requests-mock" }, + { name = "types-requests" }, + { name = "wemake-python-styleguide" }, +] + +[[package]] +name = "typing-extensions" +version = "4.13.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef", size = 106967, upload-time = "2025-04-10T14:19:05.416Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", size = 45806, upload-time = "2025-04-10T14:19:03.967Z" }, 
+] + +[[package]] +name = "tzdata" +version = "2025.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" }, +] + +[[package]] +name = "urllib3" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8a/78/16493d9c386d8e60e442a35feac5e00f0913c0f4b7c217c11e8ec2ff53e0/urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", size = 390672, upload-time = "2025-04-10T15:23:39.232Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680, upload-time = "2025-04-10T15:23:37.377Z" }, +] + +[[package]] +name = "wemake-python-styleguide" +version = "0.19.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +dependencies = [ + { name = "astor", marker = "python_full_version < '3.10'" }, + { name = "attrs", marker = 
"python_full_version < '3.10'" }, + { name = "darglint", marker = "python_full_version < '3.10'" }, + { name = "flake8", marker = "python_full_version < '3.10'" }, + { name = "flake8-bandit", marker = "python_full_version < '3.10'" }, + { name = "flake8-broken-line", marker = "python_full_version < '3.10'" }, + { name = "flake8-bugbear", marker = "python_full_version < '3.10'" }, + { name = "flake8-commas", marker = "python_full_version < '3.10'" }, + { name = "flake8-comprehensions", marker = "python_full_version < '3.10'" }, + { name = "flake8-debugger", marker = "python_full_version < '3.10'" }, + { name = "flake8-docstrings", marker = "python_full_version < '3.10'" }, + { name = "flake8-eradicate", marker = "python_full_version < '3.10'" }, + { name = "flake8-isort", marker = "python_full_version < '3.10'" }, + { name = "flake8-quotes", marker = "python_full_version < '3.10'" }, + { name = "flake8-rst-docstrings", marker = "python_full_version < '3.10'" }, + { name = "flake8-string-format", marker = "python_full_version < '3.10'" }, + { name = "pep8-naming", marker = "python_full_version < '3.10'" }, + { name = "pygments", marker = "python_full_version < '3.10'" }, + { name = "setuptools", marker = "python_full_version < '3.10'" }, + { name = "typing-extensions", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c2/f4/2a76c59661fae8534b81e992a37d347de241b242aaf5bc651b10d24b7025/wemake_python_styleguide-0.19.2.tar.gz", hash = "sha256:850fe70e6d525fd37ac51778e552a121a489f1bd057184de96ffd74a09aef414", size = 168472, upload-time = "2024-03-26T15:47:38.412Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fd/c4/0e36d00c88e995f2a0e5de8c61bb130a4acdc1b458b6bf8c7a474b127890/wemake_python_styleguide-0.19.2-py3-none-any.whl", hash = "sha256:d53205dbb629755026d853d15fb3ca03ebb2717c97de4198b5676b9bdc0663bd", size = 
224081, upload-time = "2024-03-26T15:47:35.767Z" }, +] + +[[package]] +name = "wemake-python-styleguide" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +dependencies = [ + { name = "attrs", marker = "python_full_version >= '3.10'" }, + { name = "flake8", marker = "python_full_version >= '3.10'" }, + { name = "pygments", marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cd/4f/8230334498305252c855bce55bdece636af0dac908b80d248f2cc86ba6e8/wemake_python_styleguide-1.1.0.tar.gz", hash = "sha256:a9086e4867560c06fe47deb2101c72d1a1fd7ecb7a3235b297b6e02e9298e71e", size = 154305, upload-time = "2025-03-25T10:33:44.203Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/4a/ae0c4888ed51f3907ab3732991e386a4200d493af69b5eae68c9223bdc3e/wemake_python_styleguide-1.1.0-py3-none-any.whl", hash = "sha256:32644cf35f6cd4c49c2d36e7b10336f8fe105250ba79365e27c5fa648bfc0616", size = 215695, upload-time = "2025-03-25T10:33:42.781Z" }, +] From c5b7ae60a43face3eda8a8f4deaf80ee7f655b15 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 27 May 2025 16:20:11 +0300 Subject: [PATCH 247/288] chore: add ruff --- pyproject.toml | 1 + requirements-dev.txt | 2 ++ uv.lock | 27 +++++++++++++++++++++++++++ 3 files changed, 30 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 9115e67..ac22219 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,6 +41,7 @@ dev = [ "python-dotenv", "types-requests", "faker", + "ruff>=0.11.11", ] [tool.uv] diff --git a/requirements-dev.txt b/requirements-dev.txt index fb6c8cf..17b19fc 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -60,6 +60,8 @@ requests==2.32.3 # requests-mock requests-mock==1.12.1 # via typesense (pyproject.toml:dev) +ruff==0.11.11 
+ # via typesense (pyproject.toml:dev) types-requests==2.32.0.20250515 # via typesense (pyproject.toml:dev) typing-extensions==4.13.2 diff --git a/uv.lock b/uv.lock index 992dbf4..d7e41c3 100644 --- a/uv.lock +++ b/uv.lock @@ -840,6 +840,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229, upload-time = "2025-03-30T14:15:12.283Z" }, ] +[[package]] +name = "ruff" +version = "0.11.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/53/ae4857030d59286924a8bdb30d213d6ff22d8f0957e738d0289990091dd8/ruff-0.11.11.tar.gz", hash = "sha256:7774173cc7c1980e6bf67569ebb7085989a78a103922fb83ef3dfe230cd0687d", size = 4186707, upload-time = "2025-05-22T19:19:34.363Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/14/f2326676197bab099e2a24473158c21656fbf6a207c65f596ae15acb32b9/ruff-0.11.11-py3-none-linux_armv6l.whl", hash = "sha256:9924e5ae54125ed8958a4f7de320dab7380f6e9fa3195e3dc3b137c6842a0092", size = 10229049, upload-time = "2025-05-22T19:18:45.516Z" }, + { url = "https://files.pythonhosted.org/packages/9a/f3/bff7c92dd66c959e711688b2e0768e486bbca46b2f35ac319bb6cce04447/ruff-0.11.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:c8a93276393d91e952f790148eb226658dd275cddfde96c6ca304873f11d2ae4", size = 11053601, upload-time = "2025-05-22T19:18:49.269Z" }, + { url = "https://files.pythonhosted.org/packages/e2/38/8e1a3efd0ef9d8259346f986b77de0f62c7a5ff4a76563b6b39b68f793b9/ruff-0.11.11-py3-none-macosx_11_0_arm64.whl", hash = 
"sha256:d6e333dbe2e6ae84cdedefa943dfd6434753ad321764fd937eef9d6b62022bcd", size = 10367421, upload-time = "2025-05-22T19:18:51.754Z" }, + { url = "https://files.pythonhosted.org/packages/b4/50/557ad9dd4fb9d0bf524ec83a090a3932d284d1a8b48b5906b13b72800e5f/ruff-0.11.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7885d9a5e4c77b24e8c88aba8c80be9255fa22ab326019dac2356cff42089fc6", size = 10581980, upload-time = "2025-05-22T19:18:54.011Z" }, + { url = "https://files.pythonhosted.org/packages/c4/b2/e2ed82d6e2739ece94f1bdbbd1d81b712d3cdaf69f0a1d1f1a116b33f9ad/ruff-0.11.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1b5ab797fcc09121ed82e9b12b6f27e34859e4227080a42d090881be888755d4", size = 10089241, upload-time = "2025-05-22T19:18:56.041Z" }, + { url = "https://files.pythonhosted.org/packages/3d/9f/b4539f037a5302c450d7c695c82f80e98e48d0d667ecc250e6bdeb49b5c3/ruff-0.11.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e231ff3132c1119ece836487a02785f099a43992b95c2f62847d29bace3c75ac", size = 11699398, upload-time = "2025-05-22T19:18:58.248Z" }, + { url = "https://files.pythonhosted.org/packages/61/fb/32e029d2c0b17df65e6eaa5ce7aea5fbeaed22dddd9fcfbbf5fe37c6e44e/ruff-0.11.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a97c9babe1d4081037a90289986925726b802d180cca784ac8da2bbbc335f709", size = 12427955, upload-time = "2025-05-22T19:19:00.981Z" }, + { url = "https://files.pythonhosted.org/packages/6e/e3/160488dbb11f18c8121cfd588e38095ba779ae208292765972f7732bfd95/ruff-0.11.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d8c4ddcbe8a19f59f57fd814b8b117d4fcea9bee7c0492e6cf5fdc22cfa563c8", size = 12069803, upload-time = 
"2025-05-22T19:19:03.258Z" }, + { url = "https://files.pythonhosted.org/packages/ff/16/3b006a875f84b3d0bff24bef26b8b3591454903f6f754b3f0a318589dcc3/ruff-0.11.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6224076c344a7694c6fbbb70d4f2a7b730f6d47d2a9dc1e7f9d9bb583faf390b", size = 11242630, upload-time = "2025-05-22T19:19:05.871Z" }, + { url = "https://files.pythonhosted.org/packages/65/0d/0338bb8ac0b97175c2d533e9c8cdc127166de7eb16d028a43c5ab9e75abd/ruff-0.11.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:882821fcdf7ae8db7a951df1903d9cb032bbe838852e5fc3c2b6c3ab54e39875", size = 11507310, upload-time = "2025-05-22T19:19:08.584Z" }, + { url = "https://files.pythonhosted.org/packages/6f/bf/d7130eb26174ce9b02348b9f86d5874eafbf9f68e5152e15e8e0a392e4a3/ruff-0.11.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:dcec2d50756463d9df075a26a85a6affbc1b0148873da3997286caf1ce03cae1", size = 10441144, upload-time = "2025-05-22T19:19:13.621Z" }, + { url = "https://files.pythonhosted.org/packages/b3/f3/4be2453b258c092ff7b1761987cf0749e70ca1340cd1bfb4def08a70e8d8/ruff-0.11.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:99c28505ecbaeb6594701a74e395b187ee083ee26478c1a795d35084d53ebd81", size = 10081987, upload-time = "2025-05-22T19:19:15.821Z" }, + { url = "https://files.pythonhosted.org/packages/6c/6e/dfa4d2030c5b5c13db158219f2ec67bf333e8a7748dccf34cfa2a6ab9ebc/ruff-0.11.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9263f9e5aa4ff1dec765e99810f1cc53f0c868c5329b69f13845f699fe74f639", size = 11073922, upload-time = "2025-05-22T19:19:18.104Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/f4/f7b0b0c3d32b593a20ed8010fa2c1a01f2ce91e79dda6119fcc51d26c67b/ruff-0.11.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:64ac6f885e3ecb2fdbb71de2701d4e34526651f1e8503af8fb30d4915a3fe345", size = 11568537, upload-time = "2025-05-22T19:19:20.889Z" }, + { url = "https://files.pythonhosted.org/packages/d2/46/0e892064d0adc18bcc81deed9aaa9942a27fd2cd9b1b7791111ce468c25f/ruff-0.11.11-py3-none-win32.whl", hash = "sha256:1adcb9a18802268aaa891ffb67b1c94cd70578f126637118e8099b8e4adcf112", size = 10536492, upload-time = "2025-05-22T19:19:23.642Z" }, + { url = "https://files.pythonhosted.org/packages/1b/d9/232e79459850b9f327e9f1dc9c047a2a38a6f9689e1ec30024841fc4416c/ruff-0.11.11-py3-none-win_amd64.whl", hash = "sha256:748b4bb245f11e91a04a4ff0f96e386711df0a30412b9fe0c74d5bdc0e4a531f", size = 11612562, upload-time = "2025-05-22T19:19:27.013Z" }, + { url = "https://files.pythonhosted.org/packages/ce/eb/09c132cff3cc30b2e7244191dcce69437352d6d6709c0adf374f3e6f476e/ruff-0.11.11-py3-none-win_arm64.whl", hash = "sha256:6c51f136c0364ab1b774767aa8b86331bd8e9d414e2d107db7a2189f35ea1f7b", size = 10735951, upload-time = "2025-05-22T19:19:30.043Z" }, +] + [[package]] name = "setuptools" version = "80.9.0" @@ -939,6 +964,7 @@ dev = [ { name = "pytest-mock" }, { name = "python-dotenv" }, { name = "requests-mock" }, + { name = "ruff" }, { name = "types-requests" }, { name = "wemake-python-styleguide", version = "0.19.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "wemake-python-styleguide", version = "1.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, @@ -958,6 +984,7 @@ dev = [ { 
name = "pytest-mock" }, { name = "python-dotenv" }, { name = "requests-mock" }, + { name = "ruff", specifier = ">=0.11.11" }, { name = "types-requests" }, { name = "wemake-python-styleguide" }, ] From 75ab531b478f4c99c816982c75c5a065cccd71d7 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 27 May 2025 16:33:39 +0300 Subject: [PATCH 248/288] build: configure ruff --- ruff.toml | 67 +++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 67 insertions(+) create mode 100644 ruff.toml diff --git a/ruff.toml b/ruff.toml new file mode 100644 index 0000000..25a54ba --- /dev/null +++ b/ruff.toml @@ -0,0 +1,67 @@ +exclude = [ + ".bzr", + ".direnv", + ".eggs", + ".git", + ".git-rewrite", + ".hg", + ".ipynb_checkpoints", + ".mypy_cache", + ".nox", + ".pants.d", + ".pyenv", + ".pytest_cache", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + ".vscode", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "node_modules", + "site-packages", + "venv", +] + +line-length = 88 +indent-width = 4 + +target-version = "py39" + +[lint] +select = ["E4", "E7", "E9", "F", "B"] + +ignore = ["E501"] + +fixable = ["ALL"] +unfixable = ["B"] + +# Allow unused variables when underscore-prefixed. +dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" + +[format] +quote-style = "double" + +indent-style = "space" + +skip-magic-trailing-comma = false + +line-ending = "auto" + +# Enable auto-formatting of code examples in docstrings. Markdown, +# reStructuredText code/literal blocks and doctests are all supported. +# +# This is currently disabled by default, but it is planned for this +# to be opt-out in the future. +docstring-code-format = true + +# Set the line length limit used when formatting code snippets in +# docstrings. +# +# This only has an effect when the `docstring-code-format` setting is +# enabled. 
+docstring-code-line-length = "dynamic" From 3ad53ccc5b5544411e850fefbc81e8dde488caf5 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 27 May 2025 16:33:59 +0300 Subject: [PATCH 249/288] chore: remove uneeded deps after ruff --- pyproject.toml | 3 - requirements-dev.txt | 32 +-- uv.lock | 578 ------------------------------------------- 3 files changed, 2 insertions(+), 611 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index ac22219..0d5109f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,9 +31,6 @@ build-backend = "setuptools.build_meta" [dependency-groups] dev = [ "mypy", - "wemake-python-styleguide", - "flake8", - "black", "pytest", "coverage", "pytest-mock", diff --git a/requirements-dev.txt b/requirements-dev.txt index 17b19fc..034bdec 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,51 +1,25 @@ # This file was autogenerated by uv via the following command: # uv pip compile pyproject.toml --group dev -o requirements-dev.txt -attrs==25.3.0 - # via wemake-python-styleguide -black==25.1.0 - # via typesense (pyproject.toml:dev) certifi==2025.4.26 # via requests charset-normalizer==3.4.2 # via requests -click==8.2.1 - # via black coverage==7.8.2 # via typesense (pyproject.toml:dev) faker==37.3.0 # via typesense (pyproject.toml:dev) -flake8==7.2.0 - # via - # typesense (pyproject.toml:dev) - # wemake-python-styleguide idna==3.10 # via requests iniconfig==2.1.0 # via pytest -mccabe==0.7.0 - # via flake8 mypy==1.15.0 # via typesense (pyproject.toml:dev) mypy-extensions==1.1.0 - # via - # black - # mypy + # via mypy packaging==25.0 - # via - # black - # pytest -pathspec==0.12.1 - # via black -platformdirs==4.3.8 - # via black + # via pytest pluggy==1.6.0 # via pytest -pycodestyle==2.13.0 - # via flake8 -pyflakes==3.3.2 - # via flake8 -pygments==2.19.1 - # via wemake-python-styleguide pytest==8.3.5 # via # typesense (pyproject.toml:dev) @@ -72,5 +46,3 @@ urllib3==2.4.0 # via # requests # types-requests 
-wemake-python-styleguide==1.1.0 - # via typesense (pyproject.toml:dev) diff --git a/uv.lock b/uv.lock index d7e41c3..c589e65 100644 --- a/uv.lock +++ b/uv.lock @@ -6,78 +6,6 @@ resolution-markers = [ "python_full_version < '3.10'", ] -[[package]] -name = "astor" -version = "0.8.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5a/21/75b771132fee241dfe601d39ade629548a9626d1d39f333fde31bc46febe/astor-0.8.1.tar.gz", hash = "sha256:6a6effda93f4e1ce9f618779b2dd1d9d84f1e32812c23a29b3fff6fd7f63fa5e", size = 35090, upload-time = "2019-12-10T01:50:35.51Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c3/88/97eef84f48fa04fbd6750e62dcceafba6c63c81b7ac1420856c8dcc0a3f9/astor-0.8.1-py2.py3-none-any.whl", hash = "sha256:070a54e890cefb5b3739d19f30f5a5ec840ffc9c50ffa7d23cc9fc1a38ebbfc5", size = 27488, upload-time = "2019-12-10T01:50:33.628Z" }, -] - -[[package]] -name = "attrs" -version = "25.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, -] - -[[package]] -name = "bandit" -version = "1.8.3" -source = { registry = 
"https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, - { name = "pyyaml", marker = "python_full_version < '3.10'" }, - { name = "rich", marker = "python_full_version < '3.10'" }, - { name = "stevedore", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/1a/a5/144a45f8e67df9d66c3bc3f7e69a39537db8bff1189ab7cff4e9459215da/bandit-1.8.3.tar.gz", hash = "sha256:f5847beb654d309422985c36644649924e0ea4425c76dec2e89110b87506193a", size = 4232005, upload-time = "2025-02-17T05:24:57.031Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/88/85/db74b9233e0aa27ec96891045c5e920a64dd5cbccd50f8e64e9460f48d35/bandit-1.8.3-py3-none-any.whl", hash = "sha256:28f04dc0d258e1dd0f99dee8eefa13d1cb5e3fde1a5ab0c523971f97b289bcd8", size = 129078, upload-time = "2025-02-17T05:24:54.068Z" }, -] - -[[package]] -name = "black" -version = "25.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "mypy-extensions" }, - { name = "packaging" }, - { name = "pathspec" }, - { name = "platformdirs" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/94/49/26a7b0f3f35da4b5a65f081943b7bcd22d7002f5f0fb8098ec1ff21cb6ef/black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666", size = 649449, upload-time = "2025-01-29T04:15:40.373Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/3b/4ba3f93ac8d90410423fdd31d7541ada9bcee1df32fb90d26de41ed40e1d/black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32", size = 1629419, upload-time = "2025-01-29T05:37:06.642Z" }, - { url = "https://files.pythonhosted.org/packages/b4/02/0bde0485146a8a5e694daed47561785e8b77a0466ccc1f3e485d5ef2925e/black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da", size = 1461080, upload-time = "2025-01-29T05:37:09.321Z" }, - { url = "https://files.pythonhosted.org/packages/52/0e/abdf75183c830eaca7589144ff96d49bce73d7ec6ad12ef62185cc0f79a2/black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7", size = 1766886, upload-time = "2025-01-29T04:18:24.432Z" }, - { url = "https://files.pythonhosted.org/packages/dc/a6/97d8bb65b1d8a41f8a6736222ba0a334db7b7b77b8023ab4568288f23973/black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9", size = 1419404, upload-time = "2025-01-29T04:19:04.296Z" }, - { url = "https://files.pythonhosted.org/packages/7e/4f/87f596aca05c3ce5b94b8663dbfe242a12843caaa82dd3f85f1ffdc3f177/black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0", size = 1614372, upload-time = "2025-01-29T05:37:11.71Z" }, - { url = "https://files.pythonhosted.org/packages/e7/d0/2c34c36190b741c59c901e56ab7f6e54dad8df05a6272a9747ecef7c6036/black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299", size = 1442865, upload-time = "2025-01-29T05:37:14.309Z" }, - { url = "https://files.pythonhosted.org/packages/21/d4/7518c72262468430ead45cf22bd86c883a6448b9eb43672765d69a8f1248/black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096", size = 1749699, upload-time = "2025-01-29T04:18:17.688Z" }, - { url = "https://files.pythonhosted.org/packages/58/db/4f5beb989b547f79096e035c4981ceb36ac2b552d0ac5f2620e941501c99/black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2", size = 1428028, upload-time = "2025-01-29T04:18:51.711Z" }, - { url = "https://files.pythonhosted.org/packages/83/71/3fe4741df7adf015ad8dfa082dd36c94ca86bb21f25608eb247b4afb15b2/black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b", size = 1650988, upload-time = "2025-01-29T05:37:16.707Z" }, - { url = "https://files.pythonhosted.org/packages/13/f3/89aac8a83d73937ccd39bbe8fc6ac8860c11cfa0af5b1c96d081facac844/black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc", size = 1453985, upload-time = "2025-01-29T05:37:18.273Z" }, - { url = 
"https://files.pythonhosted.org/packages/6f/22/b99efca33f1f3a1d2552c714b1e1b5ae92efac6c43e790ad539a163d1754/black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f", size = 1783816, upload-time = "2025-01-29T04:18:33.823Z" }, - { url = "https://files.pythonhosted.org/packages/18/7e/a27c3ad3822b6f2e0e00d63d58ff6299a99a5b3aee69fa77cd4b0076b261/black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba", size = 1440860, upload-time = "2025-01-29T04:19:12.944Z" }, - { url = "https://files.pythonhosted.org/packages/98/87/0edf98916640efa5d0696e1abb0a8357b52e69e82322628f25bf14d263d1/black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f", size = 1650673, upload-time = "2025-01-29T05:37:20.574Z" }, - { url = "https://files.pythonhosted.org/packages/52/e5/f7bf17207cf87fa6e9b676576749c6b6ed0d70f179a3d812c997870291c3/black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3", size = 1453190, upload-time = "2025-01-29T05:37:22.106Z" }, - { url = "https://files.pythonhosted.org/packages/e3/ee/adda3d46d4a9120772fae6de454c8495603c37c4c3b9c60f25b1ab6401fe/black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171", size = 1782926, upload-time = "2025-01-29T04:18:58.564Z" }, - { url = 
"https://files.pythonhosted.org/packages/cc/64/94eb5f45dcb997d2082f097a3944cfc7fe87e071907f677e80788a2d7b7a/black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18", size = 1442613, upload-time = "2025-01-29T04:19:27.63Z" }, - { url = "https://files.pythonhosted.org/packages/d3/b6/ae7507470a4830dbbfe875c701e84a4a5fb9183d1497834871a715716a92/black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0", size = 1628593, upload-time = "2025-01-29T05:37:23.672Z" }, - { url = "https://files.pythonhosted.org/packages/24/c1/ae36fa59a59f9363017ed397750a0cd79a470490860bc7713967d89cdd31/black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f", size = 1460000, upload-time = "2025-01-29T05:37:25.829Z" }, - { url = "https://files.pythonhosted.org/packages/ac/b6/98f832e7a6c49aa3a464760c67c7856363aa644f2f3c74cf7d624168607e/black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e", size = 1765963, upload-time = "2025-01-29T04:18:38.116Z" }, - { url = "https://files.pythonhosted.org/packages/ce/e9/2cb0a017eb7024f70e0d2e9bdb8c5a5b078c5740c7f8816065d06f04c557/black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355", size = 1419419, upload-time = "2025-01-29T04:18:30.191Z" }, - { url = "https://files.pythonhosted.org/packages/09/71/54e999902aed72baf26bca0d50781b01838251a462612966e9fc4891eadd/black-25.1.0-py3-none-any.whl", hash = 
"sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717", size = 207646, upload-time = "2025-01-29T04:15:38.082Z" }, -] - [[package]] name = "certifi" version = "2025.4.26" @@ -161,36 +89,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" }, ] -[[package]] -name = "click" -version = "8.1.8" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload-time = "2024-12-21T18:38:44.339Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload-time = "2024-12-21T18:38:41.666Z" }, -] - -[[package]] -name = "click" -version = "8.2.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.10'", -] -dependencies = [ - { name = "colorama", marker = "python_full_version >= '3.10' and sys_platform == 'win32'" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, -] - [[package]] name = "colorama" version = "0.4.6" @@ -274,33 +172,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a0/1a/0b9c32220ad694d66062f571cc5cedfa9997b64a591e8a500bb63de1bd40/coverage-7.8.2-py3-none-any.whl", hash = "sha256:726f32ee3713f7359696331a18daf0c3b3a70bb0ae71141b9d3c52be7c595e32", size = 203623, upload-time = "2025-05-23T11:39:53.846Z" }, ] -[[package]] -name = "darglint" -version = "1.8.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d4/2c/86e8549e349388c18ca8a4ff8661bb5347da550f598656d32a98eaaf91cc/darglint-1.8.1.tar.gz", hash = "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da", size = 74435, upload-time = "2021-10-18T03:40:37.283Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/69/28/85d1e0396d64422c5218d68e5cdcc53153aa8a2c83c7dbc3ee1502adf3a1/darglint-1.8.1-py3-none-any.whl", hash = "sha256:5ae11c259c17b0701618a20c3da343a3eb98b3bc4b5a83d31cdd94f5ebdced8d", size = 120767, upload-time = "2021-10-18T03:40:35.034Z" }, -] - -[[package]] -name = "docutils" -version = "0.21.2" -source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444, upload-time = "2024-04-23T18:57:18.24Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408, upload-time = "2024-04-23T18:57:14.835Z" }, -] - -[[package]] -name = "eradicate" -version = "2.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7a/e1/665186aedea2d6ebf0415cf97c0629c8123a721e7afc417deeade5598215/eradicate-2.3.0.tar.gz", hash = "sha256:06df115be3b87d0fc1c483db22a2ebb12bcf40585722810d809cc770f5031c37", size = 8536, upload-time = "2023-06-09T06:31:41.814Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/90/c2/533e1338429aeba1f089566a2314d69d3e78ab57a73006f16a923bf2b24c/eradicate-2.3.0-py3-none-any.whl", hash = "sha256:2b29b3dd27171f209e4ddd8204b70c02f0682ae95eecb353f10e8d72b149c63e", size = 6113, upload-time = "2023-06-09T06:31:40.209Z" }, -] - [[package]] name = "exceptiongroup" version = "1.3.0" @@ -325,171 +196,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ce/99/045b2dae19a01b9fbb23b9971bc04f4ef808e7f3a213d08c81067304a210/faker-37.3.0-py3-none-any.whl", hash = "sha256:48c94daa16a432f2d2bc803c7ff602509699fca228d13e97e379cd860a7e216e", size = 1942203, upload-time = 
"2025-05-14T15:24:16.159Z" }, ] -[[package]] -name = "flake8" -version = "7.2.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "mccabe" }, - { name = "pycodestyle" }, - { name = "pyflakes" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e7/c4/5842fc9fc94584c455543540af62fd9900faade32511fab650e9891ec225/flake8-7.2.0.tar.gz", hash = "sha256:fa558ae3f6f7dbf2b4f22663e5343b6b6023620461f8d4ff2019ef4b5ee70426", size = 48177, upload-time = "2025-03-29T20:08:39.329Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/83/5c/0627be4c9976d56b1217cb5187b7504e7fd7d3503f8bfd312a04077bd4f7/flake8-7.2.0-py2.py3-none-any.whl", hash = "sha256:93b92ba5bdb60754a6da14fa3b93a9361fd00a59632ada61fd7b130436c40343", size = 57786, upload-time = "2025-03-29T20:08:37.902Z" }, -] - -[[package]] -name = "flake8-bandit" -version = "4.1.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "bandit", marker = "python_full_version < '3.10'" }, - { name = "flake8", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/77/1c/4f66a7a52a246d6c64312b5c40da3af3630cd60b27af81b137796af3c0bc/flake8_bandit-4.1.1.tar.gz", hash = "sha256:068e09287189cbfd7f986e92605adea2067630b75380c6b5733dab7d87f9a84e", size = 5403, upload-time = "2022-08-29T13:48:41.225Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/5f/55bab0ac89f9ad9f4c6e38087faa80c252daec4ccb7776b4dac216ca9e3f/flake8_bandit-4.1.1-py3-none-any.whl", hash = "sha256:4c8a53eb48f23d4ef1e59293657181a3c989d0077c9952717e98a0eace43e06d", size = 4828, upload-time = "2022-08-29T13:48:39.737Z" }, -] - -[[package]] -name = "flake8-broken-line" -version = 
"1.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "flake8", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/30/5e/eca08446205afb79e74b6af8e227f06f0b1a26ae892708adbc4e65ccaa86/flake8_broken_line-1.0.0.tar.gz", hash = "sha256:e2c6a17f8d9a129e99c1320fce89b33843e2963871025c4c2bb7b8b8d8732a85", size = 3458, upload-time = "2023-05-31T10:09:11.716Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/31/ff/57d0101933527b5202cc9f80bc15aa85b207916c722a00e7adde0e33f413/flake8_broken_line-1.0.0-py3-none-any.whl", hash = "sha256:96c964336024a5030dc536a9f6fb02aa679e2d2a6b35b80a558b5136c35832a9", size = 4202, upload-time = "2023-05-31T10:09:10.027Z" }, -] - -[[package]] -name = "flake8-bugbear" -version = "24.12.12" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "attrs", marker = "python_full_version < '3.10'" }, - { name = "flake8", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c7/25/48ba712ff589b0149f21135234f9bb45c14d6689acc6151b5e2ff8ac2ae9/flake8_bugbear-24.12.12.tar.gz", hash = "sha256:46273cef0a6b6ff48ca2d69e472f41420a42a46e24b2a8972e4f0d6733d12a64", size = 82907, upload-time = "2024-12-12T16:49:26.307Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b9/21/0a875f75fbe4008bd171e2fefa413536258fe6b4cfaaa087986de74588f4/flake8_bugbear-24.12.12-py3-none-any.whl", hash = "sha256:1b6967436f65ca22a42e5373aaa6f2d87966ade9aa38d4baf2a1be550767545e", size = 36664, upload-time = "2024-12-12T16:49:23.584Z" }, -] - -[[package]] -name = "flake8-commas" -version = "2.1.0" -source = { registry = 
"https://pypi.org/simple" } -dependencies = [ - { name = "flake8", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/0e/83/814bc8eb02b8883bc004384a1fb8b1f45b4a0b892e579fec7c80a9368526/flake8-commas-2.1.0.tar.gz", hash = "sha256:940441ab8ee544df564ae3b3f49f20462d75d5c7cac2463e0b27436e2050f263", size = 8484, upload-time = "2021-10-13T19:25:41.6Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/18/0d/41895badcdbbe84893b95c114d5bd4345d69c9d5645a42857f1ccb84d556/flake8_commas-2.1.0-py2.py3-none-any.whl", hash = "sha256:ebb96c31e01d0ef1d0685a21f3f0e2f8153a0381430e748bf0bbbb5d5b453d54", size = 7591, upload-time = "2021-10-13T19:25:39.472Z" }, -] - -[[package]] -name = "flake8-comprehensions" -version = "3.16.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "flake8", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/6d/7d/7ffaa876ca5b330fc244287208dce1d12515b88a69488ea90ab58c94501d/flake8_comprehensions-3.16.0.tar.gz", hash = "sha256:9cbf789905a8f03f9d350fb82b17b264d9a16c7ce3542b2a7b871ef568cafabe", size = 12991, upload-time = "2024-10-27T21:51:18.029Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/bf/0cf8d3c9a233620840f209490c4907d7d416d066557396ebda678c58de09/flake8_comprehensions-3.16.0-py3-none-any.whl", hash = "sha256:7c1eadc9d22e765f39857798febe7766b4d9c519793c6c149e3e13bf99693f70", size = 8169, upload-time = "2024-10-27T21:51:16.464Z" }, -] - -[[package]] -name = "flake8-debugger" -version = "4.1.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "flake8", 
marker = "python_full_version < '3.10'" }, - { name = "pycodestyle", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/1f/1e/f9bdb98f3df5dceaa2287a8fb5801a22681dbd677a8759704083357e27c4/flake8-debugger-4.1.2.tar.gz", hash = "sha256:52b002560941e36d9bf806fca2523dc7fb8560a295d5f1a6e15ac2ded7a73840", size = 7801, upload-time = "2022-04-30T16:50:55.71Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5d/6b/8e5d248949798644b3d8e5f598ed5d1da82d8f157d4bafd78f45247f1690/flake8_debugger-4.1.2-py3-none-any.whl", hash = "sha256:0a5e55aeddcc81da631ad9c8c366e7318998f83ff00985a49e6b3ecf61e571bf", size = 7909, upload-time = "2022-04-30T16:50:57.294Z" }, -] - -[[package]] -name = "flake8-docstrings" -version = "1.7.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "flake8", marker = "python_full_version < '3.10'" }, - { name = "pydocstyle", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/93/24/f839e3a06e18f4643ccb81370909a497297909f15106e6af2fecdef46894/flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af", size = 5995, upload-time = "2023-01-25T14:27:13.903Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/7d/76a278fa43250441ed9300c344f889c7fb1817080c8fb8996b840bf421c2/flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75", size = 4994, upload-time = "2023-01-25T14:27:12.32Z" }, -] - -[[package]] -name = "flake8-eradicate" -version = "1.5.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = 
"attrs", marker = "python_full_version < '3.10'" }, - { name = "eradicate", marker = "python_full_version < '3.10'" }, - { name = "flake8", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/9e/72/a3975dfa4287396e9fb8fc2b4ee94a80d0809babbf92abed5af9c8e29c95/flake8_eradicate-1.5.0.tar.gz", hash = "sha256:aee636cb9ecb5594a7cd92d67ad73eb69909e5cc7bd81710cf9d00970f3983a6", size = 4508, upload-time = "2023-05-31T09:57:15.484Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e2/a9/1319b9e5eeb7d948f6db0b0ed4209bae0ec12d30ab3ee43a0ac1d8ce455f/flake8_eradicate-1.5.0-py3-none-any.whl", hash = "sha256:18acc922ad7de623f5247c7d5595da068525ec5437dd53b22ec2259b96ce9d22", size = 5144, upload-time = "2023-05-31T09:57:13.589Z" }, -] - -[[package]] -name = "flake8-isort" -version = "6.1.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "flake8", marker = "python_full_version < '3.10'" }, - { name = "isort", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/7c/ea/2f2662d4fefa6ab335c7119cb28e5bc57c935a86a69a7f72df3ea5fe7b2c/flake8_isort-6.1.2.tar.gz", hash = "sha256:9d0452acdf0e1cd6f2d6848e3605e66b54d920e73471fb4744eef0f93df62d5d", size = 17756, upload-time = "2025-01-29T12:29:25.753Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/10/295e982874f2a94f309baf7c45f852a191c87d59bd846b1701332303783f/flake8_isort-6.1.2-py3-none-any.whl", hash = "sha256:549197dedf0273502fb74f04c080beed9e62a7eb70244610413d27052e78bd3b", size = 18385, upload-time = "2025-01-29T12:29:23.46Z" }, -] - -[[package]] -name = "flake8-quotes" -version = "3.4.0" -source = { registry = 
"https://pypi.org/simple" } -dependencies = [ - { name = "flake8", marker = "python_full_version < '3.10'" }, - { name = "setuptools", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/dd/57/a173e3eb86072b7ee77650aca496b15d6886367d257f58ea9de5276e330a/flake8-quotes-3.4.0.tar.gz", hash = "sha256:aad8492fb710a2d3eabe68c5f86a1428de650c8484127e14c43d0504ba30276c", size = 14107, upload-time = "2024-02-10T21:58:22.357Z" } - -[[package]] -name = "flake8-rst-docstrings" -version = "0.3.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "flake8", marker = "python_full_version < '3.10'" }, - { name = "pygments", marker = "python_full_version < '3.10'" }, - { name = "restructuredtext-lint", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/18/d6/a3e5f86f984d6d8caa1705deffdae84c710e594ab5c1985e26c5e1bb05db/flake8_rst_docstrings-0.3.1.tar.gz", hash = "sha256:26dcc1338caf985990677696a8a6a274f73a0c6845b85f567befd3b648db78e2", size = 12867, upload-time = "2025-04-29T11:34:56.437Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/a7/ef9a2b35987d4d4b2b6213891915d0d7242ccc034861dec63540b81f3a13/flake8_rst_docstrings-0.3.1-py3-none-any.whl", hash = "sha256:ed831afca7ee47851e2162d5fa726b823b446fd46085c2164d7979ae5d9a96d7", size = 11049, upload-time = "2025-04-29T11:34:54.861Z" }, -] - -[[package]] -name = "flake8-string-format" -version = "0.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "flake8", marker = "python_full_version < '3.10'" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/68/db/500e114a9ee115b03a21a2581c227fd932a0f50c4ae8fee514ef9a373cf4/flake8-string-format-0.3.0.tar.gz", hash = "sha256:65f3da786a1461ef77fca3780b314edb2853c377f2e35069723348c8917deaa2", size = 6495, upload-time = "2020-02-16T15:27:51.045Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e8/22/e5f4ccc41dda8db61cf3bb7a93549f9ae8e1dd10547b3d71cc8483a0b437/flake8_string_format-0.3.0-py2.py3-none-any.whl", hash = "sha256:812ff431f10576a74c89be4e85b8e075a705be39bc40c4b4278b5b13e2afa9af", size = 7266, upload-time = "2020-02-16T15:27:49.327Z" }, -] - [[package]] name = "idna" version = "3.10" @@ -508,45 +214,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, ] -[[package]] -name = "isort" -version = "6.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b8/21/1e2a441f74a653a144224d7d21afe8f4169e6c7c20bb13aec3a2dc3815e0/isort-6.0.1.tar.gz", hash = "sha256:1cb5df28dfbc742e490c5e41bad6da41b805b0a8be7bc93cd0fb2a8a890ac450", size = 821955, upload-time = "2025-02-26T21:13:16.955Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/11/114d0a5f4dabbdcedc1125dee0888514c3c3b16d3e9facad87ed96fad97c/isort-6.0.1-py3-none-any.whl", hash = "sha256:2dc5d7f65c9678d94c88dfc29161a320eec67328bc97aad576874cb4be1e9615", size = 94186, upload-time = "2025-02-26T21:13:14.911Z" }, -] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -source = { registry 
= "https://pypi.org/simple" } -dependencies = [ - { name = "mdurl", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, -] - -[[package]] -name = "mccabe" -version = "0.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e7/ff/0ffefdcac38932a54d2b5eed4e0ba8a408f215002cd178ad1df0f2806ff8/mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", size = 9658, upload-time = "2022-01-24T01:14:51.113Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/27/1a/1f68f9ba0c207934b35b86a8ca3aad8395a3d6dd7921c0686e23853ff5a9/mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e", size = 7350, upload-time = "2022-01-24T01:14:49.62Z" }, -] - -[[package]] -name = "mdurl" -version = "0.1.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, -] - [[package]] name = "mypy" version = "1.15.0" @@ -609,48 +276,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, ] -[[package]] -name = "pathspec" -version = "0.12.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, -] - -[[package]] -name = "pbr" -version = "6.1.1" -source = { registry = 
"https://pypi.org/simple" } -dependencies = [ - { name = "setuptools", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/01/d2/510cc0d218e753ba62a1bc1434651db3cd797a9716a0a66cc714cb4f0935/pbr-6.1.1.tar.gz", hash = "sha256:93ea72ce6989eb2eed99d0f75721474f69ad88128afdef5ac377eb797c4bf76b", size = 125702, upload-time = "2025-02-04T14:28:06.514Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/47/ac/684d71315abc7b1214d59304e23a982472967f6bf4bde5a98f1503f648dc/pbr-6.1.1-py2.py3-none-any.whl", hash = "sha256:38d4daea5d9fa63b3f626131b9d34947fd0c8be9b05a29276870580050a25a76", size = 108997, upload-time = "2025-02-04T14:28:03.168Z" }, -] - -[[package]] -name = "pep8-naming" -version = "0.13.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "flake8", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/5b/c0/0db8b2867395a9a137e86af8bdf5a566e41d9c6453e509cd3042419ae29e/pep8-naming-0.13.3.tar.gz", hash = "sha256:1705f046dfcd851378aac3be1cd1551c7c1e5ff363bacad707d43007877fa971", size = 16129, upload-time = "2022-12-19T20:45:27.158Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4f/48/9533518e0394fb858ac2b4b55fe18f24aa33c87c943f691336ec842d9728/pep8_naming-0.13.3-py3-none-any.whl", hash = "sha256:1a86b8c71a03337c97181917e2b472f0f5e4ccb06844a0d6f0a33522549e7a80", size = 8490, upload-time = "2022-12-19T20:45:25.132Z" }, -] - -[[package]] -name = "platformdirs" -version = "4.3.8" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362, upload-time = "2025-05-07T22:47:42.121Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567, upload-time = "2025-05-07T22:47:40.376Z" }, -] - [[package]] name = "pluggy" version = "1.6.0" @@ -660,45 +285,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] -[[package]] -name = "pycodestyle" -version = "2.13.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/04/6e/1f4a62078e4d95d82367f24e685aef3a672abfd27d1a868068fed4ed2254/pycodestyle-2.13.0.tar.gz", hash = "sha256:c8415bf09abe81d9c7f872502a6eee881fbe85d8763dd5b9924bb0a01d67efae", size = 39312, upload-time = "2025-03-29T17:33:30.669Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/07/be/b00116df1bfb3e0bb5b45e29d604799f7b91dd861637e4d448b4e09e6a3e/pycodestyle-2.13.0-py2.py3-none-any.whl", hash = "sha256:35863c5974a271c7a726ed228a14a4f6daf49df369d8c50cd9a6f58a5e143ba9", size = 31424, upload-time = "2025-03-29T17:33:29.405Z" }, -] - -[[package]] -name = "pydocstyle" -version = "6.3.0" -source = { 
registry = "https://pypi.org/simple" } -dependencies = [ - { name = "snowballstemmer", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e9/5c/d5385ca59fd065e3c6a5fe19f9bc9d5ea7f2509fa8c9c22fb6b2031dd953/pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1", size = 36796, upload-time = "2023-01-17T20:29:19.838Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/36/ea/99ddefac41971acad68f14114f38261c1f27dac0b3ec529824ebc739bdaa/pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019", size = 38038, upload-time = "2023-01-17T20:29:18.094Z" }, -] - -[[package]] -name = "pyflakes" -version = "3.3.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/af/cc/1df338bd7ed1fa7c317081dcf29bf2f01266603b301e6858856d346a12b3/pyflakes-3.3.2.tar.gz", hash = "sha256:6dfd61d87b97fba5dcfaaf781171ac16be16453be6d816147989e7f6e6a9576b", size = 64175, upload-time = "2025-03-31T13:21:20.34Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/15/40/b293a4fa769f3b02ab9e387c707c4cbdc34f073f945de0386107d4e669e6/pyflakes-3.3.2-py2.py3-none-any.whl", hash = "sha256:5039c8339cbb1944045f4ee5466908906180f13cc99cc9949348d10f82a5c32a", size = 63164, upload-time = "2025-03-31T13:21:18.503Z" }, -] - -[[package]] -name = "pygments" -version = "2.19.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581, upload-time = "2025-01-06T17:26:30.443Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293, upload-time = "2025-01-06T17:26:25.553Z" }, -] - [[package]] name = "pytest" version = "8.3.5" @@ -737,59 +323,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256, upload-time = "2025-03-25T10:14:55.034Z" }, ] -[[package]] -name = "pyyaml" -version = "6.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199, upload-time = "2024-08-06T20:31:40.178Z" }, - { url = 
"https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758, upload-time = "2024-08-06T20:31:42.173Z" }, - { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463, upload-time = "2024-08-06T20:31:44.263Z" }, - { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280, upload-time = "2024-08-06T20:31:50.199Z" }, - { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239, upload-time = "2024-08-06T20:31:52.292Z" }, - { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802, upload-time = "2024-08-06T20:31:53.836Z" }, - { url = 
"https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527, upload-time = "2024-08-06T20:31:55.565Z" }, - { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052, upload-time = "2024-08-06T20:31:56.914Z" }, - { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774, upload-time = "2024-08-06T20:31:58.304Z" }, - { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" }, - { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" }, - { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" }, - { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" }, - { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" }, - { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" }, - { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" }, - { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" }, - { url = 
"https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" }, - { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, - { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, - { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, - { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, - { url = 
"https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, - { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, - { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, - { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, - { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, - { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" }, - { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" }, - { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" }, - { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" }, - { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" }, - { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" }, - { url = 
"https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" }, - { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, - { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, - { url = "https://files.pythonhosted.org/packages/65/d8/b7a1db13636d7fb7d4ff431593c510c8b8fca920ade06ca8ef20015493c5/PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d", size = 184777, upload-time = "2024-08-06T20:33:25.896Z" }, - { url = "https://files.pythonhosted.org/packages/0a/02/6ec546cd45143fdf9840b2c6be8d875116a64076218b61d68e12548e5839/PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f", size = 172318, upload-time = "2024-08-06T20:33:27.212Z" }, - { url = "https://files.pythonhosted.org/packages/0e/9a/8cc68be846c972bda34f6c2a93abb644fb2476f4dcc924d52175786932c9/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290", size = 720891, upload-time = "2024-08-06T20:33:28.974Z" }, - { url = "https://files.pythonhosted.org/packages/e9/6c/6e1b7f40181bc4805e2e07f4abc10a88ce4648e7e95ff1abe4ae4014a9b2/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12", size = 722614, upload-time = "2024-08-06T20:33:34.157Z" }, - { url = "https://files.pythonhosted.org/packages/3d/32/e7bd8535d22ea2874cef6a81021ba019474ace0d13a4819c2a4bce79bd6a/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19", size = 737360, upload-time = "2024-08-06T20:33:35.84Z" }, - { url = "https://files.pythonhosted.org/packages/d7/12/7322c1e30b9be969670b672573d45479edef72c9a0deac3bb2868f5d7469/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e", size = 699006, upload-time = "2024-08-06T20:33:37.501Z" }, - { url = "https://files.pythonhosted.org/packages/82/72/04fcad41ca56491995076630c3ec1e834be241664c0c09a64c9a2589b507/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725", size = 723577, upload-time = "2024-08-06T20:33:39.389Z" }, - { url = "https://files.pythonhosted.org/packages/ed/5e/46168b1f2757f1fcd442bc3029cd8767d88a98c9c05770d8b420948743bb/PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631", size = 144593, upload-time = "2024-08-06T20:33:46.63Z" }, - { url = 
"https://files.pythonhosted.org/packages/19/87/5124b1c1f2412bb95c59ec481eaf936cd32f0fe2a7b16b97b81c4c017a6a/PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8", size = 162312, upload-time = "2024-08-06T20:33:49.073Z" }, -] - [[package]] name = "requests" version = "2.32.3" @@ -817,29 +350,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/97/ec/889fbc557727da0c34a33850950310240f2040f3b1955175fdb2b36a8910/requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563", size = 27695, upload-time = "2024-03-29T03:54:27.64Z" }, ] -[[package]] -name = "restructuredtext-lint" -version = "1.4.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "docutils", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/48/9c/6d8035cafa2d2d314f34e6cd9313a299de095b26e96f1c7312878f988eec/restructuredtext_lint-1.4.0.tar.gz", hash = "sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45", size = 16723, upload-time = "2022-02-24T05:51:10.907Z" } - -[[package]] -name = "rich" -version = "14.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "markdown-it-py", marker = "python_full_version < '3.10'" }, - { name = "pygments", marker = "python_full_version < '3.10'" }, - { name = "typing-extensions", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a1/53/830aa4c3066a8ab0ae9a9955976fb770fe9c6102117c8ec4ab3ea62d89e8/rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725", 
size = 224078, upload-time = "2025-03-30T14:15:14.23Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229, upload-time = "2025-03-30T14:15:12.283Z" }, -] - [[package]] name = "ruff" version = "0.11.11" @@ -865,36 +375,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ce/eb/09c132cff3cc30b2e7244191dcce69437352d6d6709c0adf374f3e6f476e/ruff-0.11.11-py3-none-win_arm64.whl", hash = "sha256:6c51f136c0364ab1b774767aa8b86331bd8e9d414e2d107db7a2189f35ea1f7b", size = 10735951, upload-time = "2025-05-22T19:19:30.043Z" }, ] -[[package]] -name = "setuptools" -version = "80.9.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, -] - -[[package]] -name = "snowballstemmer" -version = "3.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/75/a7/9810d872919697c9d01295633f5d574fb416d47e535f258272ca1f01f447/snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895", size = 105575, upload-time = "2025-05-09T16:34:51.843Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/78/3565d011c61f5a43488987ee32b6f3f656e7f107ac2782dd57bdd7d91d9a/snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064", size = 103274, upload-time = "2025-05-09T16:34:50.371Z" }, -] - -[[package]] -name = "stevedore" -version = "5.4.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pbr", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/28/3f/13cacea96900bbd31bb05c6b74135f85d15564fc583802be56976c940470/stevedore-5.4.1.tar.gz", hash = "sha256:3135b5ae50fe12816ef291baff420acb727fcd356106e3e9cbfa9e5985cd6f4b", size = 513858, upload-time = "2025-02-20T14:03:57.285Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f7/45/8c4ebc0c460e6ec38e62ab245ad3c7fc10b210116cea7c16d61602aa9558/stevedore-5.4.1-py3-none-any.whl", hash = "sha256:d10a31c7b86cba16c1f6e8d15416955fc797052351a56af15e608ad20811fcfe", size = 49533, upload-time = "2025-02-20T14:03:55.849Z" }, -] - [[package]] name = "tomli" version = "2.2.1" @@ -955,10 +435,8 @@ dependencies = [ [package.dev-dependencies] dev = [ - { name = "black" }, { name = "coverage" }, { name = "faker" }, - { name = "flake8" }, { name = "mypy" }, { name = "pytest" }, { name = "pytest-mock" }, @@ -966,8 +444,6 @@ dev = [ { name = "requests-mock" }, { name = "ruff" }, { name = "types-requests" }, - { name = 
"wemake-python-styleguide", version = "0.19.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "wemake-python-styleguide", version = "1.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, ] [package.metadata] @@ -975,10 +451,8 @@ requires-dist = [{ name = "requests" }] [package.metadata.requires-dev] dev = [ - { name = "black" }, { name = "coverage" }, { name = "faker" }, - { name = "flake8" }, { name = "mypy" }, { name = "pytest" }, { name = "pytest-mock" }, @@ -986,7 +460,6 @@ dev = [ { name = "requests-mock" }, { name = "ruff", specifier = ">=0.11.11" }, { name = "types-requests" }, - { name = "wemake-python-styleguide" }, ] [[package]] @@ -1015,54 +488,3 @@ sdist = { url = "https://files.pythonhosted.org/packages/8a/78/16493d9c386d8e60e wheels = [ { url = "https://files.pythonhosted.org/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680, upload-time = "2025-04-10T15:23:37.377Z" }, ] - -[[package]] -name = "wemake-python-styleguide" -version = "0.19.2" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "astor", marker = "python_full_version < '3.10'" }, - { name = "attrs", marker = "python_full_version < '3.10'" }, - { name = "darglint", marker = "python_full_version < '3.10'" }, - { name = "flake8", marker = "python_full_version < '3.10'" }, - { name = "flake8-bandit", marker = "python_full_version < '3.10'" }, - { name = "flake8-broken-line", marker = "python_full_version < '3.10'" }, - { name = 
"flake8-bugbear", marker = "python_full_version < '3.10'" }, - { name = "flake8-commas", marker = "python_full_version < '3.10'" }, - { name = "flake8-comprehensions", marker = "python_full_version < '3.10'" }, - { name = "flake8-debugger", marker = "python_full_version < '3.10'" }, - { name = "flake8-docstrings", marker = "python_full_version < '3.10'" }, - { name = "flake8-eradicate", marker = "python_full_version < '3.10'" }, - { name = "flake8-isort", marker = "python_full_version < '3.10'" }, - { name = "flake8-quotes", marker = "python_full_version < '3.10'" }, - { name = "flake8-rst-docstrings", marker = "python_full_version < '3.10'" }, - { name = "flake8-string-format", marker = "python_full_version < '3.10'" }, - { name = "pep8-naming", marker = "python_full_version < '3.10'" }, - { name = "pygments", marker = "python_full_version < '3.10'" }, - { name = "setuptools", marker = "python_full_version < '3.10'" }, - { name = "typing-extensions", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c2/f4/2a76c59661fae8534b81e992a37d347de241b242aaf5bc651b10d24b7025/wemake_python_styleguide-0.19.2.tar.gz", hash = "sha256:850fe70e6d525fd37ac51778e552a121a489f1bd057184de96ffd74a09aef414", size = 168472, upload-time = "2024-03-26T15:47:38.412Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/c4/0e36d00c88e995f2a0e5de8c61bb130a4acdc1b458b6bf8c7a474b127890/wemake_python_styleguide-0.19.2-py3-none-any.whl", hash = "sha256:d53205dbb629755026d853d15fb3ca03ebb2717c97de4198b5676b9bdc0663bd", size = 224081, upload-time = "2024-03-26T15:47:35.767Z" }, -] - -[[package]] -name = "wemake-python-styleguide" -version = "1.1.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.10'", -] -dependencies = [ - { name = "attrs", marker 
= "python_full_version >= '3.10'" }, - { name = "flake8", marker = "python_full_version >= '3.10'" }, - { name = "pygments", marker = "python_full_version >= '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/cd/4f/8230334498305252c855bce55bdece636af0dac908b80d248f2cc86ba6e8/wemake_python_styleguide-1.1.0.tar.gz", hash = "sha256:a9086e4867560c06fe47deb2101c72d1a1fd7ecb7a3235b297b6e02e9298e71e", size = 154305, upload-time = "2025-03-25T10:33:44.203Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/12/4a/ae0c4888ed51f3907ab3732991e386a4200d493af69b5eae68c9223bdc3e/wemake_python_styleguide-1.1.0-py3-none-any.whl", hash = "sha256:32644cf35f6cd4c49c2d36e7b10336f8fe105250ba79365e27c5fa648bfc0616", size = 215695, upload-time = "2025-03-25T10:33:42.781Z" }, -] From e3aea3cef3b6f6ea546d770e36f9708e9440be7a Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 27 May 2025 16:42:36 +0300 Subject: [PATCH 250/288] build: remove EOL 3.8 version from CI --- .github/workflows/release.yml | 2 +- .github/workflows/test-and-lint.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 98c956e..047f115 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -21,7 +21,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v4 with: - python-version: "3.8" + python-version: "3.9" cache: pip - name: Install build dependencies diff --git a/.github/workflows/test-and-lint.yml b/.github/workflows/test-and-lint.yml index 67ede67..3bdbad2 100644 --- a/.github/workflows/test-and-lint.yml +++ b/.github/workflows/test-and-lint.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12"] services: typesense: image: typesense/typesense:28.0 
From fe4640823ae91594ae64b8e49dfe0e1ba6b68132 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 27 May 2025 16:42:48 +0300 Subject: [PATCH 251/288] chore: add isort to dev deps --- pyproject.toml | 1 + uv.lock | 11 +++++++++++ 2 files changed, 12 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 0d5109f..1f26b2c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,6 +39,7 @@ dev = [ "types-requests", "faker", "ruff>=0.11.11", + "isort>=6.0.1", ] [tool.uv] diff --git a/uv.lock b/uv.lock index c589e65..0846166 100644 --- a/uv.lock +++ b/uv.lock @@ -214,6 +214,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, ] +[[package]] +name = "isort" +version = "6.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b8/21/1e2a441f74a653a144224d7d21afe8f4169e6c7c20bb13aec3a2dc3815e0/isort-6.0.1.tar.gz", hash = "sha256:1cb5df28dfbc742e490c5e41bad6da41b805b0a8be7bc93cd0fb2a8a890ac450", size = 821955, upload-time = "2025-02-26T21:13:16.955Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/11/114d0a5f4dabbdcedc1125dee0888514c3c3b16d3e9facad87ed96fad97c/isort-6.0.1-py3-none-any.whl", hash = "sha256:2dc5d7f65c9678d94c88dfc29161a320eec67328bc97aad576874cb4be1e9615", size = 94186, upload-time = "2025-02-26T21:13:14.911Z" }, +] + [[package]] name = "mypy" version = "1.15.0" @@ -437,6 +446,7 @@ dependencies = [ dev = [ { name = "coverage" }, { name = "faker" }, + { name = "isort" }, { name = "mypy" }, { name = "pytest" }, { name = "pytest-mock" }, @@ -453,6 
+463,7 @@ requires-dist = [{ name = "requests" }] dev = [ { name = "coverage" }, { name = "faker" }, + { name = "isort", specifier = ">=6.0.1" }, { name = "mypy" }, { name = "pytest" }, { name = "pytest-mock" }, From 33a767c972d5492ec788cc4c8904a2efffb21be1 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 27 May 2025 16:43:05 +0300 Subject: [PATCH 252/288] fix: lint exception raising --- src/typesense/stemming_dictionaries.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/typesense/stemming_dictionaries.py b/src/typesense/stemming_dictionaries.py index 01471f1..08ef414 100644 --- a/src/typesense/stemming_dictionaries.py +++ b/src/typesense/stemming_dictionaries.py @@ -152,8 +152,8 @@ def _parse_response( for line in response.split("\n"): try: decoded = json.loads(line) - except json.JSONDecodeError: - raise ValueError(f"Failed to parse JSON from response: {line}") + except json.JSONDecodeError as err: + raise ValueError(f"Failed to parse JSON from response: {line}") from err object_list.append(decoded) return object_list From 82893354b0b33a34d450c6dbae5ee36bb30082d7 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Tue, 27 May 2025 16:47:29 +0300 Subject: [PATCH 253/288] ci: use uv on ci instead of pip --- .github/workflows/test-and-lint.yml | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/.github/workflows/test-and-lint.yml b/.github/workflows/test-and-lint.yml index 3bdbad2..678254e 100644 --- a/.github/workflows/test-and-lint.yml +++ b/.github/workflows/test-and-lint.yml @@ -31,27 +31,25 @@ jobs: - name: Wait for Typesense run: | timeout 20 bash -c 'while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' localhost:8108/health)" != "200" ]]; do sleep 1; done' || false + - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + - name: Install uv and set the python version + uses: astral-sh/setup-uv@v5 with: python-version: ${{ 
matrix.python-version }} - cache: 'pip' - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements/dev.txt + - name: Install the project + run: uv sync --locked --all-extras --dev - - name: Lint with Flake8 + - name: Lint with Ruff run: | - flake8 src/typesense + uv run ruff check src/typesense - name: Check types with mypy run: | - mypy src/typesense + uv run mypy src/typesense - name: Run tests and coverage (excluding OpenAI) run: | - coverage run -m pytest -m "not open_ai" + uv run coverage run -m pytest -m "not open_ai" From cb27c744ee25bb788943ba9f21ffe474d363e5c1 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 2 Jul 2025 13:42:11 +0300 Subject: [PATCH 254/288] feat(client): add nl search models functionality - add `NLSearchModel` class for individual model operations (retrieve, update, delete) - add `NLSearchModels` class for collection operations (create, retrieve, __getitem__) - add type definitions in `types/nl_search_model.py` with support for multiple llm providers - integrate `nl_search_models` into main `Client` class with proper imports - add test fixtures and tests for both individual and collection operations --- src/typesense/client.py | 2 + src/typesense/nl_search_model.py | 108 ++++++++++++++++ src/typesense/nl_search_models.py | 117 +++++++++++++++++ src/typesense/types/nl_search_model.py | 140 +++++++++++++++++++++ tests/fixtures/nl_search_model_fixtures.py | 78 ++++++++++++ tests/nl_search_model_test.py | 99 +++++++++++++++ tests/nl_search_models_test.py | 117 +++++++++++++++++ 7 files changed, 661 insertions(+) create mode 100644 src/typesense/nl_search_model.py create mode 100644 src/typesense/nl_search_models.py create mode 100644 src/typesense/types/nl_search_model.py create mode 100644 tests/fixtures/nl_search_model_fixtures.py create mode 100644 tests/nl_search_model_test.py create mode 100644 tests/nl_search_models_test.py diff --git a/src/typesense/client.py 
b/src/typesense/client.py index cde957b..f60acd0 100644 --- a/src/typesense/client.py +++ b/src/typesense/client.py @@ -46,6 +46,7 @@ from typesense.keys import Keys from typesense.metrics import Metrics from typesense.multi_search import MultiSearch +from typesense.nl_search_models import NLSearchModels from typesense.operations import Operations from typesense.stemming import Stemming from typesense.stopwords import Stopwords @@ -107,6 +108,7 @@ def __init__(self, config_dict: ConfigDict) -> None: self.stopwords = Stopwords(self.api_call) self.metrics = Metrics(self.api_call) self.conversations_models = ConversationsModels(self.api_call) + self.nl_search_models = NLSearchModels(self.api_call) def typed_collection( self, diff --git a/src/typesense/nl_search_model.py b/src/typesense/nl_search_model.py new file mode 100644 index 0000000..49aaab1 --- /dev/null +++ b/src/typesense/nl_search_model.py @@ -0,0 +1,108 @@ +""" +This module provides functionality for managing individual NL search models in Typesense. + +Classes: + - NLSearchModel: Handles operations related to a specific NL search model. + +Methods: + - __init__: Initializes the NLSearchModel object. + - _endpoint_path: Constructs the API endpoint path for this specific NL search model. + - retrieve: Retrieves the details of this specific NL search model. + - update: Updates this specific NL search model. + - delete: Deletes this specific NL search model. + +The NLSearchModel class interacts with the Typesense API to manage operations on a +specific NL search model. It provides methods to retrieve, update, +and delete individual models. + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. 
+""" + +from typesense.api_call import ApiCall +from typesense.types.nl_search_model import ( + NLSearchModelDeleteSchema, + NLSearchModelSchema, + NLSearchModelUpdateSchema, +) + + +class NLSearchModel: + """ + Class for managing individual NL search models in Typesense. + + This class provides methods to interact with a specific NL search model, + including retrieving, updating, and deleting it. + + Attributes: + model_id (str): The ID of the NL search model. + api_call (ApiCall): The API call object for making requests. + """ + + def __init__(self, api_call: ApiCall, model_id: str) -> None: + """ + Initialize the NLSearchModel object. + + Args: + api_call (ApiCall): The API call object for making requests. + model_id (str): The ID of the NL search model. + """ + self.model_id = model_id + self.api_call = api_call + + def retrieve(self) -> NLSearchModelSchema: + """ + Retrieve this specific NL search model. + + Returns: + NLSearchModelSchema: The schema containing the NL search model details. + """ + response = self.api_call.get( + self._endpoint_path, + as_json=True, + entity_type=NLSearchModelSchema, + ) + return response + + def update(self, model: NLSearchModelUpdateSchema) -> NLSearchModelSchema: + """ + Update this specific NL search model. + + Args: + model (NLSearchModelUpdateSchema): + The schema containing the updated model details. + + Returns: + NLSearchModelSchema: The schema containing the updated NL search model. + """ + response: NLSearchModelSchema = self.api_call.put( + self._endpoint_path, + body=model, + entity_type=NLSearchModelSchema, + ) + return response + + def delete(self) -> NLSearchModelDeleteSchema: + """ + Delete this specific NL search model. + + Returns: + NLSearchModelDeleteSchema: The schema containing the deletion response. 
+ """ + response: NLSearchModelDeleteSchema = self.api_call.delete( + self._endpoint_path, + entity_type=NLSearchModelDeleteSchema, + ) + return response + + @property + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific NL search model. + + Returns: + str: The constructed endpoint path. + """ + from typesense.nl_search_models import NLSearchModels + + return "/".join([NLSearchModels.resource_path, self.model_id]) diff --git a/src/typesense/nl_search_models.py b/src/typesense/nl_search_models.py new file mode 100644 index 0000000..d184add --- /dev/null +++ b/src/typesense/nl_search_models.py @@ -0,0 +1,117 @@ +""" +This module provides functionality for managing NL search models in Typesense. + +Classes: + - NLSearchModels: Handles operations related to NL search models. + +Methods: + - __init__: Initializes the NLSearchModels object. + - __getitem__: Retrieves or creates an NLSearchModel object for a given model_id. + - create: Creates a new NL search model. + - retrieve: Retrieves all NL search models. + +Attributes: + - resource_path: The API resource path for NL search models operations. + +The NLSearchModels class interacts with the Typesense API to manage +NL search model operations. + +It provides methods to create and retrieve NL search models, as well as access +individual NLSearchModel objects. + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + +import sys + +from typesense.api_call import ApiCall +from typesense.types.nl_search_model import ( + NLSearchModelCreateSchema, + NLSearchModelSchema, + NLSearchModelsRetrieveSchema, +) + +if sys.version_info > (3, 11): + import typing +else: + import typing_extensions as typing + +from typesense.nl_search_model import NLSearchModel + + +class NLSearchModels(object): + """ + Class for managing NL search models in Typesense. 
+ + This class provides methods to interact with NL search models, including + creating, retrieving, and accessing individual models. + + Attributes: + resource_path (str): The API resource path for NL search models operations. + api_call (ApiCall): The API call object for making requests. + nl_search_models (Dict[str, NLSearchModel]): + A dictionary of NLSearchModel objects. + """ + + resource_path: typing.Final[str] = "/nl_search_models" + + def __init__(self, api_call: ApiCall) -> None: + """ + Initialize the NLSearchModels object. + + Args: + api_call (ApiCall): The API call object for making requests. + """ + self.api_call = api_call + self.nl_search_models: typing.Dict[str, NLSearchModel] = {} + + def __getitem__(self, model_id: str) -> NLSearchModel: + """ + Get or create an NLSearchModel object for a given model_id. + + Args: + model_id (str): The ID of the NL search model. + + Returns: + NLSearchModel: The NLSearchModel object for the given ID. + """ + if model_id not in self.nl_search_models: + self.nl_search_models[model_id] = NLSearchModel( + self.api_call, + model_id, + ) + return self.nl_search_models[model_id] + + def create(self, model: NLSearchModelCreateSchema) -> NLSearchModelSchema: + """ + Create a new NL search model. + + Args: + model (NLSearchModelCreateSchema): + The schema for creating the NL search model. + + Returns: + NLSearchModelSchema: The created NL search model. + """ + response = self.api_call.post( + endpoint=NLSearchModels.resource_path, + entity_type=NLSearchModelSchema, + as_json=True, + body=model, + ) + return response + + def retrieve(self) -> NLSearchModelsRetrieveSchema: + """ + Retrieve all NL search models. + + Returns: + NLSearchModelsRetrieveSchema: A list of all NL search models. 
+ """ + response: NLSearchModelsRetrieveSchema = self.api_call.get( + endpoint=NLSearchModels.resource_path, + entity_type=NLSearchModelsRetrieveSchema, + as_json=True, + ) + return response diff --git a/src/typesense/types/nl_search_model.py b/src/typesense/types/nl_search_model.py new file mode 100644 index 0000000..5ad4570 --- /dev/null +++ b/src/typesense/types/nl_search_model.py @@ -0,0 +1,140 @@ +"""NLSearchModel types for Typesense Python Client.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class NLSearchModelBase(typing.TypedDict): + """ + Base schema with all possible fields for NL search models. + + Attributes: + model_name (str): Name of the LLM model. + api_key (str): The LLM service's API Key. + api_url (str): The API URL for the LLM service. + max_bytes (int): The maximum number of bytes to send to the LLM. + temperature (float): The temperature parameter for the LLM. + system_prompt (str): The system prompt for the LLM. + top_p (float): The top_p parameter (Google-specific). + top_k (int): The top_k parameter (Google-specific). + stop_sequences (list[str]): Stop sequences for the LLM (Google-specific). + api_version (str): API version (Google-specific). + project_id (str): GCP project ID (GCP Vertex AI specific). + access_token (str): Access token for GCP (GCP Vertex AI specific). + refresh_token (str): Refresh token for GCP (GCP Vertex AI specific). + client_id (str): Client ID for GCP (GCP Vertex AI specific). + client_secret (str): Client secret for GCP (GCP Vertex AI specific). + region (str): Region for GCP (GCP Vertex AI specific). + max_output_tokens (int): Maximum output tokens (GCP Vertex AI specific). + account_id (str): Account ID (Cloudflare specific). 
+ """ + + model_name: str + api_key: typing.NotRequired[str] + api_url: typing.NotRequired[str] + max_bytes: typing.NotRequired[int] + temperature: typing.NotRequired[float] + system_prompt: typing.NotRequired[str] + # Google-specific parameters + top_p: typing.NotRequired[float] + top_k: typing.NotRequired[int] + stop_sequences: typing.NotRequired[typing.List[str]] + api_version: typing.NotRequired[str] + # GCP Vertex AI specific + project_id: typing.NotRequired[str] + access_token: typing.NotRequired[str] + refresh_token: typing.NotRequired[str] + client_id: typing.NotRequired[str] + client_secret: typing.NotRequired[str] + region: typing.NotRequired[str] + max_output_tokens: typing.NotRequired[int] + # Cloudflare specific + account_id: typing.NotRequired[str] + + +class NLSearchModelCreateSchema(NLSearchModelBase): + """ + Schema for creating a new NL search model. + + Attributes: + id (str): The custom ID of the model. + """ + + id: typing.NotRequired[str] + + +class NLSearchModelUpdateSchema(typing.TypedDict): + """ + Base schema with all possible fields for NL search models. + + Attributes: + model_name (str): Name of the LLM model. + api_key (str): The LLM service's API Key. + api_url (str): The API URL for the LLM service. + max_bytes (int): The maximum number of bytes to send to the LLM. + temperature (float): The temperature parameter for the LLM. + system_prompt (str): The system prompt for the LLM. + top_p (float): The top_p parameter (Google-specific). + top_k (int): The top_k parameter (Google-specific). + stop_sequences (list[str]): Stop sequences for the LLM (Google-specific). + api_version (str): API version (Google-specific). + project_id (str): GCP project ID (GCP Vertex AI specific). + access_token (str): Access token for GCP (GCP Vertex AI specific). + refresh_token (str): Refresh token for GCP (GCP Vertex AI specific). + client_id (str): Client ID for GCP (GCP Vertex AI specific). 
+ client_secret (str): Client secret for GCP (GCP Vertex AI specific). + region (str): Region for GCP (GCP Vertex AI specific). + max_output_tokens (int): Maximum output tokens (GCP Vertex AI specific). + account_id (str): Account ID (Cloudflare specific). + """ + + model_name: typing.NotRequired[str] + api_key: typing.NotRequired[str] + api_url: typing.NotRequired[str] + max_bytes: typing.NotRequired[int] + temperature: typing.NotRequired[float] + system_prompt: typing.NotRequired[str] + # Google-specific parameters + top_p: typing.NotRequired[float] + top_k: typing.NotRequired[int] + stop_sequences: typing.NotRequired[typing.List[str]] + api_version: typing.NotRequired[str] + # GCP Vertex AI specific + project_id: typing.NotRequired[str] + access_token: typing.NotRequired[str] + refresh_token: typing.NotRequired[str] + client_id: typing.NotRequired[str] + client_secret: typing.NotRequired[str] + region: typing.NotRequired[str] + max_output_tokens: typing.NotRequired[int] + # Cloudflare specific + account_id: typing.NotRequired[str] + + +class NLSearchModelDeleteSchema(typing.TypedDict): + """ + Schema for deleting an NL search model. + + Attributes: + id (str): The ID of the model. + """ + + id: str + + +class NLSearchModelSchema(NLSearchModelBase): + """ + Schema for an NL search model. + + Attributes: + id (str): The ID of the model. 
+ """ + + id: str + + +NLSearchModelsRetrieveSchema = typing.List[NLSearchModelSchema] diff --git a/tests/fixtures/nl_search_model_fixtures.py b/tests/fixtures/nl_search_model_fixtures.py new file mode 100644 index 0000000..4949b98 --- /dev/null +++ b/tests/fixtures/nl_search_model_fixtures.py @@ -0,0 +1,78 @@ +"""Fixtures for the NL search model tests.""" + +import os + +import pytest +import requests +from dotenv import load_dotenv + +from typesense.api_call import ApiCall +from typesense.nl_search_model import NLSearchModel +from typesense.nl_search_models import NLSearchModels + +load_dotenv() + + +@pytest.fixture(scope="function", name="delete_all_nl_search_models") +def clear_typesense_nl_search_models() -> None: + """Remove all nl_search_models from the Typesense server.""" + url = "http://localhost:8108/nl_search_models" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + # Get the list of models + response = requests.get(url, headers=headers, timeout=3) + response.raise_for_status() + + nl_search_models = response.json() + + # Delete each NL search model + for nl_search_model in nl_search_models: + model_id = nl_search_model.get("id") + delete_url = f"{url}/{model_id}" + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + delete_response.raise_for_status() + + +@pytest.fixture(scope="function", name="create_nl_search_model") +def create_nl_search_model_fixture() -> str: + """Create an NL search model in the Typesense server.""" + url = "http://localhost:8108/nl_search_models" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + nl_search_model_data = { + "api_key": os.environ.get("OPEN_AI_KEY", "test-api-key"), + "max_bytes": 16384, + "model_name": "openai/gpt-3.5-turbo", + "system_prompt": "This is a system prompt for NL search", + } + + response = requests.post( + url, + headers=headers, + json=nl_search_model_data, + timeout=3, + ) + + 
response.raise_for_status() + + model_id: str = response.json()["id"] + return model_id + + +@pytest.fixture(scope="function", name="fake_nl_search_models") +def fake_nl_search_models_fixture(fake_api_call: ApiCall) -> NLSearchModels: + """Return an NLSearchModels object with test values.""" + return NLSearchModels(fake_api_call) + + +@pytest.fixture(scope="function", name="fake_nl_search_model") +def fake_nl_search_model_fixture(fake_api_call: ApiCall) -> NLSearchModel: + """Return an NLSearchModel object with test values.""" + return NLSearchModel(fake_api_call, "nl_search_model_id") + + +@pytest.fixture(scope="function", name="actual_nl_search_models") +def actual_nl_search_models_fixture( + actual_api_call: ApiCall, +) -> NLSearchModels: + """Return an NLSearchModels object using a real API.""" + return NLSearchModels(actual_api_call) diff --git a/tests/nl_search_model_test.py b/tests/nl_search_model_test.py new file mode 100644 index 0000000..d47a536 --- /dev/null +++ b/tests/nl_search_model_test.py @@ -0,0 +1,99 @@ +"""Tests for the NLSearchModel class.""" + +from __future__ import annotations + +import pytest +from dotenv import load_dotenv + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_keys, +) +from typesense.api_call import ApiCall +from typesense.nl_search_model import NLSearchModel +from typesense.nl_search_models import NLSearchModels + +load_dotenv() + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the NLSearchModel object is initialized correctly.""" + nl_search_model = NLSearchModel( + fake_api_call, + "nl_search_model_id", + ) + + assert nl_search_model.model_id == "nl_search_model_id" + assert_match_object(nl_search_model.api_call, fake_api_call) + assert_object_lists_match( + nl_search_model.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + nl_search_model.api_call.config.nearest_node, + 
fake_api_call.config.nearest_node, + ) + assert ( + nl_search_model._endpoint_path # noqa: WPS437 + == "/nl_search_models/nl_search_model_id" + ) + + +@pytest.mark.open_ai +def test_actual_retrieve( + actual_nl_search_models: NLSearchModels, + delete_all_nl_search_models: None, + create_nl_search_model: str, +) -> None: + """Test it can retrieve an NL search model from Typesense Server.""" + response = actual_nl_search_models[create_nl_search_model].retrieve() + + assert_to_contain_keys( + response, + ["id", "model_name", "system_prompt", "max_bytes", "api_key"], + ) + assert response.get("id") == create_nl_search_model + + +@pytest.mark.open_ai +def test_actual_update( + actual_nl_search_models: NLSearchModels, + delete_all_nl_search_models: None, + create_nl_search_model: str, +) -> None: + """Test that it can update an NL search model from Typesense Server.""" + response = actual_nl_search_models[create_nl_search_model].update( + {"system_prompt": "This is a new system prompt for NL search"}, + ) + + assert_to_contain_keys( + response, + [ + "id", + "model_name", + "system_prompt", + "max_bytes", + "api_key", + ], + ) + + assert response.get("system_prompt") == "This is a new system prompt for NL search" + assert response.get("id") == create_nl_search_model + + +@pytest.mark.open_ai +def test_actual_delete( + actual_nl_search_models: NLSearchModels, + delete_all_nl_search_models: None, + create_nl_search_model: str, +) -> None: + """Test that it can delete an NL search model from Typesense Server.""" + response = actual_nl_search_models[create_nl_search_model].delete() + + assert_to_contain_keys( + response, + ["id"], + ) + + assert response.get("id") == create_nl_search_model diff --git a/tests/nl_search_models_test.py b/tests/nl_search_models_test.py new file mode 100644 index 0000000..1558b39 --- /dev/null +++ b/tests/nl_search_models_test.py @@ -0,0 +1,117 @@ +"""Tests for the NLSearchModels class.""" + +from __future__ import annotations + +import os 
+import sys + +import pytest + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_keys, + assert_to_contain_object, +) +from typesense.api_call import ApiCall +from typesense.nl_search_models import NLSearchModels +from typesense.types.nl_search_model import NLSearchModelSchema + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the NLSearchModels object is initialized correctly.""" + nl_search_models = NLSearchModels(fake_api_call) + + assert_match_object(nl_search_models.api_call, fake_api_call) + assert_object_lists_match( + nl_search_models.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + nl_search_models.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + + assert not nl_search_models.nl_search_models + + +def test_get_missing_nl_search_model( + fake_nl_search_models: NLSearchModels, +) -> None: + """Test that the NLSearchModels object can get a missing nl_search_model.""" + nl_search_model = fake_nl_search_models["nl_search_model_id"] + + assert_match_object( + nl_search_model.api_call, + fake_nl_search_models.api_call, + ) + assert_object_lists_match( + nl_search_model.api_call.node_manager.nodes, + fake_nl_search_models.api_call.node_manager.nodes, + ) + assert_match_object( + nl_search_model.api_call.config.nearest_node, + fake_nl_search_models.api_call.config.nearest_node, + ) + assert ( + nl_search_model._endpoint_path # noqa: WPS437 + == "/nl_search_models/nl_search_model_id" + ) + + +def test_get_existing_nl_search_model( + fake_nl_search_models: NLSearchModels, +) -> None: + """Test that the NLSearchModels object can get an existing nl_search_model.""" + nl_search_model = fake_nl_search_models["nl_search_model_id"] + fetched_nl_search_model = fake_nl_search_models["nl_search_model_id"] + + assert 
len(fake_nl_search_models.nl_search_models) == 1 + + assert nl_search_model is fetched_nl_search_model + + +@pytest.mark.open_ai +def test_actual_create( + actual_nl_search_models: NLSearchModels, +) -> None: + """Test that it can create an NL search model on Typesense Server.""" + response = actual_nl_search_models.create( + { + "api_key": os.environ.get("OPEN_AI_KEY", "test-api-key"), + "max_bytes": 16384, + "model_name": "openai/gpt-3.5-turbo", + "system_prompt": "This is meant for testing purposes", + }, + ) + + assert_to_contain_keys( + response, + ["id", "api_key", "max_bytes", "model_name", "system_prompt"], + ) + + +@pytest.mark.open_ai +def test_actual_retrieve( + actual_nl_search_models: NLSearchModels, + delete_all_nl_search_models: None, + create_nl_search_model: str, +) -> None: + """Test that it can retrieve NL search models from Typesense Server.""" + response = actual_nl_search_models.retrieve() + assert len(response) == 1 + assert_to_contain_object( + response[0], + { + "id": create_nl_search_model, + }, + ) + assert_to_contain_keys( + response[0], + ["id", "api_key", "max_bytes", "model_name", "system_prompt"], + ) From ca6def8f5ef981a6a3f4e243f79d029de31979e0 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 2 Jul 2025 13:59:37 +0300 Subject: [PATCH 255/288] feat(types): add natural language query support to search parameters - add `NLLanguageParameters` type with nl query configuration options - add `LLMResponse` and `ParsedNLQuery` types for nl query responses - extend `SearchParameters` to include natural language parameters - add `parsed_nl_query` field to `SearchResponse` for nl query metadata --- src/typesense/types/document.py | 52 +++++++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/src/typesense/types/document.py b/src/typesense/types/document.py index 416ca7e..a0b63b4 100644 --- a/src/typesense/types/document.py +++ b/src/typesense/types/document.py @@ -569,6 +569,23 @@ class 
CachingParameters(typing.TypedDict):
     cache_ttl: typing.NotRequired[int]
 
 
+class NLLanguageParameters(typing.TypedDict):
+    """
+    Parameters for [natural language search queries](https://typesense.org/docs/api/natural-language-search.html).
+
+    Attributes:
+        nl_query_prompt_cache_ttl (int): The duration (in seconds) that determines how long the schema prompts are cached.
+        nl_query (bool): Whether to use natural language in the query or not.
+        nl_model_id (str): The ID of the natural language model to use for the query.
+        nl_query_debug (bool): Whether to return the raw LLM response or not.
+    """
+
+    nl_query_prompt_cache_ttl: typing.NotRequired[int]
+    nl_query: typing.NotRequired[bool]
+    nl_model_id: typing.NotRequired[str]
+    nl_query_debug: typing.NotRequired[bool]
+
+
 class SearchParameters(
     RequiredSearchParameters,
     QueryParameters,
@@ -580,6 +597,7 @@ class SearchParameters(
     ResultsParameters,
     TypoToleranceParameters,
     CachingParameters,
+    NLLanguageParameters,
 ):
     """Parameters for searching documents."""
 
@@ -823,6 +841,38 @@ class Conversation(typing.TypedDict):
     query: str
 
 
+class LLMResponse(typing.TypedDict):
+    """
+    Schema for a raw LLM response.
+
+    Attributes:
+        content (str): Content of the LLM response.
+        extraction_method (str): Extraction method of the LLM response (e.g. "regex").
+        model (str): Model used to generate the response.
+    """
+
+    content: str
+    extraction_method: str
+    model: str
+
+
+class ParsedNLQuery(typing.TypedDict):
+    """
+    Schema for a parsed natural language query.
+
+    Attributes:
+        parse_time_ms (int): Parse time in milliseconds.
+        generated_params (SearchParameters): Generated parameters.
+        augmented_params (SearchParameters): Augmented parameters.
+        llm_response (LLMResponse): Raw LLM response.
+ """ + + parse_time_ms: int + generated_params: SearchParameters + augmented_params: SearchParameters + llm_response: typing.NotRequired[LLMResponse] + + class SearchResponse(typing.Generic[TDoc], typing.TypedDict): """ Schema for a search response. @@ -838,6 +888,7 @@ class SearchResponse(typing.Generic[TDoc], typing.TypedDict): hits (list[Hit[TDoc]]): List of hits in the search results. grouped_hits (list[GroupedHit[TDoc]]): List of grouped hits in the search results. conversation (Conversation): Conversation in the search results. + parsed_nl_query (ParsedNLQuery): Information about the natural language query """ facet_counts: typing.List[SearchResponseFacetCountSchema] @@ -850,6 +901,7 @@ class SearchResponse(typing.Generic[TDoc], typing.TypedDict): hits: typing.List[Hit[TDoc]] grouped_hits: typing.NotRequired[typing.List[GroupedHit[TDoc]]] conversation: typing.NotRequired[Conversation] + parsed_nl_query: typing.NotRequired[ParsedNLQuery] class DeleteSingleDocumentParameters(typing.TypedDict): From 85cabeff78f3f2552479c418c1d4e57526a8c4c8 Mon Sep 17 00:00:00 2001 From: Laurent Constantin Date: Mon, 7 Jul 2025 11:09:37 +0200 Subject: [PATCH 256/288] Fix: union in multisearch --- src/typesense/multi_search.py | 5 +++- tests/multi_search_test.py | 45 +++++++++++++++++++++++++++++++++++ 2 files changed, 49 insertions(+), 1 deletion(-) diff --git a/src/typesense/multi_search.py b/src/typesense/multi_search.py index 47a9fa9..9e3792f 100644 --- a/src/typesense/multi_search.py +++ b/src/typesense/multi_search.py @@ -83,7 +83,10 @@ def perform( stringify_search_params(search_params) for search_params in search_queries.get("searches") ] - search_body = {"searches": stringified_search_params} + search_body = { + "searches": stringified_search_params, + "union": search_queries.get("union", False), + } response: MultiSearchResponse = self.api_call.post( MultiSearch.resource_path, body=search_body, diff --git a/tests/multi_search_test.py b/tests/multi_search_test.py index 
59727a7..eac190b 100644 --- a/tests/multi_search_test.py +++ b/tests/multi_search_test.py @@ -106,6 +106,51 @@ def test_multi_search_multiple_searches( ) +def test_multi_search_union( + actual_multi_search: MultiSearch, + actual_api_call: ApiCall, + delete_all: None, + create_collection: None, + create_document: None, +) -> None: + """Test that the MultiSearch object can perform multiple searches.""" + request_params: MultiSearchRequestSchema = { + "union": True, + "searches": [ + {"q": "com", "query_by": "company_name", "collection": "companies"}, + {"q": "company", "query_by": "company_name", "collection": "companies"}, + ], + } + + response = actual_multi_search.perform(search_queries=request_params) + + assert_to_contain_keys( + response, + [ + "found", + "hits", + "page", + "out_of", + "union_request_params", + "search_time_ms", + "search_cutoff", + ], + ) + + assert_to_contain_keys( + response.get("hits")[0], + [ + "collection", + "document", + "highlights", + "highlight", + "text_match", + "text_match_info", + "search_index", + ], + ) + + def test_multi_search_array( actual_multi_search: MultiSearch, actual_api_call: ApiCall, From fba5eb33021fedb70a05f3c0f3d3956b1941e4db Mon Sep 17 00:00:00 2001 From: Harisaran G Date: Tue, 26 Aug 2025 13:27:13 +0530 Subject: [PATCH 257/288] add: analytics API --- examples/analytics_operations.py | 12 +- src/typesense/analytics.py | 46 +--- src/typesense/analytics_events.py | 73 ++++++ src/typesense/analytics_rule.py | 98 ++----- src/typesense/analytics_rule_v1.py | 106 ++++++++ src/typesense/analytics_rules.py | 170 +++---------- src/typesense/analytics_rules_v1.py | 165 ++++++++++++ src/typesense/analytics_v1.py | 44 ++++ src/typesense/client.py | 5 +- src/typesense/types/analytics.py | 84 ++++++ ...analytics_rule.py => analytics_rule_v1.py} | 4 +- src/typesense/types/collection.py | 1 + tests/analytics_events_test.py | 140 ++++++++++ tests/analytics_rule_test.py | 121 +++------ tests/analytics_rule_v1_test.py | 129 
++++++++++ tests/analytics_rules_test.py | 240 ++++++------------ tests/analytics_rules_v1_test.py | 234 +++++++++++++++++ tests/analytics_test.py | 9 +- tests/analytics_v1_test.py | 27 ++ tests/client_test.py | 6 +- tests/collection_test.py | 1 + tests/collections_test.py | 5 + ...rule_fixtures.py => analytics_fixtures.py} | 34 ++- tests/fixtures/analytics_rule_v1_fixtures.py | 70 +++++ tests/import_test.py | 6 +- tests/synonym_test.py | 18 ++ tests/synonyms_test.py | 18 ++ tests/utils/version.py | 20 ++ 28 files changed, 1350 insertions(+), 536 deletions(-) create mode 100644 src/typesense/analytics_events.py create mode 100644 src/typesense/analytics_rule_v1.py create mode 100644 src/typesense/analytics_rules_v1.py create mode 100644 src/typesense/analytics_v1.py create mode 100644 src/typesense/types/analytics.py rename src/typesense/types/{analytics_rule.py => analytics_rule_v1.py} (98%) create mode 100644 tests/analytics_events_test.py create mode 100644 tests/analytics_rule_v1_test.py create mode 100644 tests/analytics_rules_v1_test.py create mode 100644 tests/analytics_v1_test.py rename tests/fixtures/{analytics_rule_fixtures.py => analytics_fixtures.py} (75%) create mode 100644 tests/fixtures/analytics_rule_v1_fixtures.py create mode 100644 tests/utils/version.py diff --git a/examples/analytics_operations.py b/examples/analytics_operations.py index c625c99..6593baf 100644 --- a/examples/analytics_operations.py +++ b/examples/analytics_operations.py @@ -12,12 +12,12 @@ # Drop pre-existing rule if any try: - client.analytics.rules['top_queries'].delete() + client.analyticsV1.rules['top_queries'].delete() except Exception as e: pass # Create a new rule -create_response = client.analytics.rules.create({ +create_response = client.analyticsV1.rules.create({ "name": "top_queries", "type": "popular_queries", "params": { @@ -33,10 +33,10 @@ print(create_response) # Try to fetch it back -print(client.analytics.rules['top_queries'].retrieve()) 
+print(client.analyticsV1.rules['top_queries'].retrieve()) # Update the rule -update_response = client.analytics.rules.upsert('top_queries', { +update_response = client.analyticsV1.rules.upsert('top_queries', { "name": "top_queries", "type": "popular_queries", "params": { @@ -52,7 +52,7 @@ print(update_response) # List all rules -print(client.analytics.rules.retrieve()) +print(client.analyticsV1.rules.retrieve()) # Delete the rule -print(client.analytics.rules['top_queries'].delete()) +print(client.analyticsV1.rules['top_queries'].delete()) diff --git a/src/typesense/analytics.py b/src/typesense/analytics.py index 941cca5..3463748 100644 --- a/src/typesense/analytics.py +++ b/src/typesense/analytics.py @@ -1,42 +1,24 @@ -""" -This module provides functionality for managing analytics in Typesense. +"""Client for Typesense Analytics module.""" -Classes: - - Analytics: Handles operations related to analytics, including access to analytics rules. +import sys -Methods: - - __init__: Initializes the Analytics object. +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing -The Analytics class serves as an entry point for analytics-related operations in Typesense, -currently providing access to AnalyticsRules. - -For more information on analytics, refer to the Analytics & Query Suggestion -[documentation](https://typesense.org/docs/27.0/api/analytics-query-suggestions.html) - -This module uses type hinting and is compatible with Python 3.11+ as well as earlier -versions through the use of the typing_extensions library. -""" - -from typesense.analytics_rules import AnalyticsRules from typesense.api_call import ApiCall +from typesense.analytics_events import AnalyticsEvents +from typesense.analytics_rules import AnalyticsRules -class Analytics(object): - """ - Class for managing analytics in Typesense. 
+class Analytics: + """Client for v30 Analytics endpoints.""" - This class provides access to analytics-related functionalities, - currently including operations on analytics rules. + def __init__(self, api_call: ApiCall) -> None: + self.api_call = api_call + self.rules = AnalyticsRules(api_call) + self.events = AnalyticsEvents(api_call) - Attributes: - rules (AnalyticsRules): An instance of AnalyticsRules for managing analytics rules. - """ - def __init__(self, api_call: ApiCall) -> None: - """ - Initialize the Analytics object. - Args: - api_call (ApiCall): The API call object for making requests. - """ - self.rules = AnalyticsRules(api_call) diff --git a/src/typesense/analytics_events.py b/src/typesense/analytics_events.py new file mode 100644 index 0000000..c462e6c --- /dev/null +++ b/src/typesense/analytics_events.py @@ -0,0 +1,73 @@ +"""Client for Analytics events and status operations.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from typesense.api_call import ApiCall +from typesense.types.analytics import ( + AnalyticsEvent as AnalyticsEventSchema, + AnalyticsEventCreateResponse, + AnalyticsEventsResponse, + AnalyticsStatus, +) + + +class AnalyticsEvents: + events_path: typing.Final[str] = "/analytics/events" + flush_path: typing.Final[str] = "/analytics/flush" + status_path: typing.Final[str] = "/analytics/status" + + def __init__(self, api_call: ApiCall) -> None: + self.api_call = api_call + + def create(self, event: AnalyticsEventSchema) -> AnalyticsEventCreateResponse: + response: AnalyticsEventCreateResponse = self.api_call.post( + AnalyticsEvents.events_path, + body=event, + as_json=True, + entity_type=AnalyticsEventCreateResponse, + ) + return response + + def retrieve( + self, + *, + user_id: str, + name: str, + n: int, + ) -> AnalyticsEventsResponse: + params: typing.Dict[str, typing.Union[str, int]] = { + "user_id": user_id, + "name": name, + "n": n, + } + response: 
AnalyticsEventsResponse = self.api_call.get( + AnalyticsEvents.events_path, + params=params, + as_json=True, + entity_type=AnalyticsEventsResponse, + ) + return response + + def flush(self) -> AnalyticsEventCreateResponse: + response: AnalyticsEventCreateResponse = self.api_call.post( + AnalyticsEvents.flush_path, + body={}, + as_json=True, + entity_type=AnalyticsEventCreateResponse, + ) + return response + + def status(self) -> AnalyticsStatus: + response: AnalyticsStatus = self.api_call.get( + AnalyticsEvents.status_path, + as_json=True, + entity_type=AnalyticsStatus, + ) + return response + + diff --git a/src/typesense/analytics_rule.py b/src/typesense/analytics_rule.py index 29e9a64..d9c21b2 100644 --- a/src/typesense/analytics_rule.py +++ b/src/typesense/analytics_rule.py @@ -1,24 +1,4 @@ -""" -This module provides functionality for managing individual analytics rules in Typesense. - -Classes: - - AnalyticsRule: Handles operations related to a specific analytics rule. - -Methods: - - __init__: Initializes the AnalyticsRule object. - - _endpoint_path: Constructs the API endpoint path for this specific analytics rule. - - retrieve: Retrieves the details of this specific analytics rule. - - delete: Deletes this specific analytics rule. - -The AnalyticsRule class interacts with the Typesense API to manage operations on a -specific analytics rule. It provides methods to retrieve and delete individual rules. - -For more information on analytics, refer to the Analytics & Query Suggestion -[documentation](https://typesense.org/docs/27.0/api/analytics-query-suggestions.html) - -This module uses type hinting and is compatible with Python 3.11+ as well as earlier -versions through the use of the typing_extensions library. 
-""" +"""Per-rule client for Analytics rules operations.""" import sys @@ -28,77 +8,33 @@ import typing_extensions as typing from typesense.api_call import ApiCall -from typesense.types.analytics_rule import ( - RuleDeleteSchema, - RuleSchemaForCounters, - RuleSchemaForQueries, -) +from typesense.types.analytics import AnalyticsRule class AnalyticsRule: - """ - Class for managing individual analytics rules in Typesense. - - This class provides methods to interact with a specific analytics rule, - including retrieving and deleting it. - - Attributes: - api_call (ApiCall): The API call object for making requests. - rule_id (str): The ID of the analytics rule. - """ - - def __init__(self, api_call: ApiCall, rule_id: str): - """ - Initialize the AnalyticsRule object. - - Args: - api_call (ApiCall): The API call object for making requests. - rule_id (str): The ID of the analytics rule. - """ + def __init__(self, api_call: ApiCall, rule_name: str) -> None: self.api_call = api_call - self.rule_id = rule_id + self.rule_name = rule_name + + @property + def _endpoint_path(self) -> str: + from typesense.analytics_rules import AnalyticsRules - def retrieve( - self, - ) -> typing.Union[RuleSchemaForQueries, RuleSchemaForCounters]: - """ - Retrieve this specific analytics rule. + return "/".join([AnalyticsRules.resource_path, self.rule_name]) - Returns: - Union[RuleSchemaForQueries, RuleSchemaForCounters]: - The schema containing the rule details. - """ - response: typing.Union[RuleSchemaForQueries, RuleSchemaForCounters] = ( - self.api_call.get( - self._endpoint_path, - entity_type=typing.Union[RuleSchemaForQueries, RuleSchemaForCounters], - as_json=True, - ) + def retrieve(self) -> AnalyticsRule: + response: AnalyticsRule = self.api_call.get( + self._endpoint_path, + as_json=True, + entity_type=AnalyticsRule, ) return response - def delete(self) -> RuleDeleteSchema: - """ - Delete this specific analytics rule. 
- - Returns: - RuleDeleteSchema: The schema containing the deletion response. - """ - response: RuleDeleteSchema = self.api_call.delete( + def delete(self) -> AnalyticsRule: + response: AnalyticsRule = self.api_call.delete( self._endpoint_path, - entity_type=RuleDeleteSchema, + entity_type=AnalyticsRule, ) - return response - @property - def _endpoint_path(self) -> str: - """ - Construct the API endpoint path for this specific analytics rule. - - Returns: - str: The constructed endpoint path. - """ - from typesense.analytics_rules import AnalyticsRules - return "/".join([AnalyticsRules.resource_path, self.rule_id]) diff --git a/src/typesense/analytics_rule_v1.py b/src/typesense/analytics_rule_v1.py new file mode 100644 index 0000000..dc6890d --- /dev/null +++ b/src/typesense/analytics_rule_v1.py @@ -0,0 +1,106 @@ +""" +This module provides functionality for managing individual analytics rules in Typesense (V1). + +Classes: + - AnalyticsRuleV1: Handles operations related to a specific analytics rule. + +Methods: + - __init__: Initializes the AnalyticsRuleV1 object. + - _endpoint_path: Constructs the API endpoint path for this specific analytics rule. + - retrieve: Retrieves the details of this specific analytics rule. + - delete: Deletes this specific analytics rule. + +The AnalyticsRuleV1 class interacts with the Typesense API to manage operations on a +specific analytics rule. It provides methods to retrieve and delete individual rules. + +For more information on analytics, refer to the Analytics & Query Suggestion +[documentation](https://typesense.org/docs/27.0/api/analytics-query-suggestions.html) + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. 
+""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from typesense.api_call import ApiCall +from typesense.types.analytics_rule_v1 import ( + RuleDeleteSchema, + RuleSchemaForCounters, + RuleSchemaForQueries, +) + + +class AnalyticsRuleV1: + """ + Class for managing individual analytics rules in Typesense (V1). + + This class provides methods to interact with a specific analytics rule, + including retrieving and deleting it. + + Attributes: + api_call (ApiCall): The API call object for making requests. + rule_id (str): The ID of the analytics rule. + """ + + def __init__(self, api_call: ApiCall, rule_id: str): + """ + Initialize the AnalyticsRuleV1 object. + + Args: + api_call (ApiCall): The API call object for making requests. + rule_id (str): The ID of the analytics rule. + """ + self.api_call = api_call + self.rule_id = rule_id + + def retrieve( + self, + ) -> typing.Union[RuleSchemaForQueries, RuleSchemaForCounters]: + """ + Retrieve this specific analytics rule. + + Returns: + Union[RuleSchemaForQueries, RuleSchemaForCounters]: + The schema containing the rule details. + """ + response: typing.Union[RuleSchemaForQueries, RuleSchemaForCounters] = ( + self.api_call.get( + self._endpoint_path, + entity_type=typing.Union[RuleSchemaForQueries, RuleSchemaForCounters], + as_json=True, + ) + ) + return response + + def delete(self) -> RuleDeleteSchema: + """ + Delete this specific analytics rule. + + Returns: + RuleDeleteSchema: The schema containing the deletion response. + """ + response: RuleDeleteSchema = self.api_call.delete( + self._endpoint_path, + entity_type=RuleDeleteSchema, + ) + + return response + + @property + def _endpoint_path(self) -> str: + """ + Construct the API endpoint path for this specific analytics rule. + + Returns: + str: The constructed endpoint path. 
+ """ + from typesense.analytics_rules_v1 import AnalyticsRulesV1 + + return "/".join([AnalyticsRulesV1.resource_path, self.rule_id]) + + diff --git a/src/typesense/analytics_rules.py b/src/typesense/analytics_rules.py index 89f748a..2097e0b 100644 --- a/src/typesense/analytics_rules.py +++ b/src/typesense/analytics_rules.py @@ -1,29 +1,4 @@ -""" -This module provides functionality for managing analytics rules in Typesense. - -Classes: - - AnalyticsRules: Handles operations related to analytics rules. - -Methods: - - __init__: Initializes the AnalyticsRules object. - - __getitem__: Retrieves or creates an AnalyticsRule object for a given rule_id. - - create: Creates a new analytics rule. - - upsert: Creates or updates an analytics rule. - - retrieve: Retrieves all analytics rules. - -Attributes: - - resource_path: The API resource path for analytics rules. - -The AnalyticsRules class interacts with the Typesense API to manage analytics rule operations. -It provides methods to create, update, and retrieve analytics rules, as well as access -individual AnalyticsRule objects. - -For more information on analytics, refer to the Analytics & Query Suggestion -[documentation](https://typesense.org/docs/27.0/api/analytics-query-suggestions.html) - -This module uses type hinting and is compatible with Python 3.11+ as well as earlier -versions through the use of the typing_extensions library. 
-""" +"""Client for Analytics rules collection operations.""" import sys @@ -32,132 +7,53 @@ else: import typing_extensions as typing -from typesense.analytics_rule import AnalyticsRule from typesense.api_call import ApiCall -from typesense.types.analytics_rule import ( - RuleCreateSchemaForCounters, - RuleCreateSchemaForQueries, - RuleSchemaForCounters, - RuleSchemaForQueries, - RulesRetrieveSchema, +from typesense.types.analytics import ( + AnalyticsRule, + AnalyticsRuleCreate, + AnalyticsRuleUpdate, ) -_RuleParams = typing.Union[ - typing.Dict[str, typing.Union[str, int, bool]], - None, -] - class AnalyticsRules(object): - """ - Class for managing analytics rules in Typesense. - - This class provides methods to interact with analytics rules, including - creating, updating, and retrieving them. - - Attributes: - resource_path (str): The API resource path for analytics rules. - api_call (ApiCall): The API call object for making requests. - rules (Dict[str, AnalyticsRule]): A dictionary of AnalyticsRule objects. - """ - resource_path: typing.Final[str] = "/analytics/rules" - def __init__(self, api_call: ApiCall): - """ - Initialize the AnalyticsRules object. - - Args: - api_call (ApiCall): The API call object for making requests. - """ + def __init__(self, api_call: ApiCall) -> None: self.api_call = api_call - self.rules: typing.Dict[str, AnalyticsRule] = {} - - def __getitem__(self, rule_id: str) -> AnalyticsRule: - """ - Get or create an AnalyticsRule object for a given rule_id. - - Args: - rule_id (str): The ID of the analytics rule. - - Returns: - AnalyticsRule: The AnalyticsRule object for the given ID. 
- """ - if not self.rules.get(rule_id): - self.rules[rule_id] = AnalyticsRule(self.api_call, rule_id) - return self.rules[rule_id] + self.rules: typing.Dict[str, "AnalyticsRule"] = {} - def create( - self, - rule: typing.Union[RuleCreateSchemaForCounters, RuleCreateSchemaForQueries], - rule_parameters: _RuleParams = None, - ) -> typing.Union[RuleSchemaForCounters, RuleSchemaForQueries]: - """ - Create a new analytics rule. + def __getitem__(self, rule_name: str) -> "AnalyticsRule": + if rule_name not in self.rules: + from typesense.analytics_rule import AnalyticsRule as PerRule - This method can create both counter rules and query rules. + self.rules[rule_name] = PerRule(self.api_call, rule_name) + return typing.cast("AnalyticsRule", self.rules[rule_name]) - Args: - rule (Union[RuleCreateSchemaForCounters, RuleCreateSchemaForQueries]): - The rule schema. Use RuleCreateSchemaForCounters for counter rules - and RuleCreateSchemaForQueries for query rules. - - rule_parameters (_RuleParams, optional): Additional rule parameters. - - Returns: - Union[RuleSchemaForCounters, RuleSchemaForQueries]: - The created rule. Returns RuleSchemaForCounters for counter rules - and RuleSchemaForQueries for query rules. - """ - response: typing.Union[RuleSchemaForCounters, RuleSchemaForQueries] = ( - self.api_call.post( - AnalyticsRules.resource_path, - body=rule, - params=rule_parameters, - as_json=True, - entity_type=typing.Union[ - RuleSchemaForCounters, - RuleSchemaForQueries, - ], - ) - ) - return response - - def upsert( - self, - rule_id: str, - rule: typing.Union[RuleCreateSchemaForQueries, RuleSchemaForCounters], - ) -> typing.Union[RuleSchemaForCounters, RuleCreateSchemaForQueries]: - """ - Create or update an analytics rule. - - Args: - rule_id (str): The ID of the rule to upsert. - rule (Union[RuleCreateSchemaForQueries, RuleSchemaForCounters]): The rule schema. - - Returns: - Union[RuleSchemaForCounters, RuleCreateSchemaForQueries]: The upserted rule. 
- """ - response = self.api_call.put( - "/".join([AnalyticsRules.resource_path, rule_id]), + def create(self, rule: AnalyticsRuleCreate) -> AnalyticsRule: + response: AnalyticsRule = self.api_call.post( + AnalyticsRules.resource_path, body=rule, - entity_type=typing.Union[RuleSchemaForQueries, RuleSchemaForCounters], - ) - return typing.cast( - typing.Union[RuleSchemaForCounters, RuleCreateSchemaForQueries], - response, + as_json=True, + entity_type=AnalyticsRule, ) + return response - def retrieve(self) -> RulesRetrieveSchema: - """ - Retrieve all analytics rules. - - Returns: - RulesRetrieveSchema: The schema containing all analytics rules. - """ - response: RulesRetrieveSchema = self.api_call.get( + def retrieve(self, *, rule_tag: typing.Union[str, None] = None) -> typing.List[AnalyticsRule]: + params: typing.Dict[str, str] = {} + if rule_tag: + params["rule_tag"] = rule_tag + response: typing.List[AnalyticsRule] = self.api_call.get( AnalyticsRules.resource_path, + params=params if params else None, as_json=True, - entity_type=RulesRetrieveSchema, + entity_type=typing.List[AnalyticsRule], ) return response + + def upsert(self, rule_name: str, update: AnalyticsRuleUpdate) -> AnalyticsRule: + response: AnalyticsRule = self.api_call.put( + "/".join([AnalyticsRules.resource_path, rule_name]), + body=update, + entity_type=AnalyticsRule, + ) + return response \ No newline at end of file diff --git a/src/typesense/analytics_rules_v1.py b/src/typesense/analytics_rules_v1.py new file mode 100644 index 0000000..a850d37 --- /dev/null +++ b/src/typesense/analytics_rules_v1.py @@ -0,0 +1,165 @@ +""" +This module provides functionality for managing analytics rules in Typesense (V1). + +Classes: + - AnalyticsRulesV1: Handles operations related to analytics rules. + +Methods: + - __init__: Initializes the AnalyticsRulesV1 object. + - __getitem__: Retrieves or creates an AnalyticsRuleV1 object for a given rule_id. + - create: Creates a new analytics rule. 
+ - upsert: Creates or updates an analytics rule. + - retrieve: Retrieves all analytics rules. + +Attributes: + - resource_path: The API resource path for analytics rules. + +The AnalyticsRulesV1 class interacts with the Typesense API to manage analytics rule operations. +It provides methods to create, update, and retrieve analytics rules, as well as access +individual AnalyticsRuleV1 objects. + +For more information on analytics, refer to the Analytics & Query Suggestion +[documentation](https://typesense.org/docs/27.0/api/analytics-query-suggestions.html) + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from typesense.analytics_rule_v1 import AnalyticsRuleV1 +from typesense.api_call import ApiCall +from typesense.types.analytics_rule_v1 import ( + RuleCreateSchemaForCounters, + RuleCreateSchemaForQueries, + RuleSchemaForCounters, + RuleSchemaForQueries, + RulesRetrieveSchema, +) + +_RuleParams = typing.Union[ + typing.Dict[str, typing.Union[str, int, bool]], + None, +] + + +class AnalyticsRulesV1(object): + """ + Class for managing analytics rules in Typesense (V1). + + This class provides methods to interact with analytics rules, including + creating, updating, and retrieving them. + + Attributes: + resource_path (str): The API resource path for analytics rules. + api_call (ApiCall): The API call object for making requests. + rules (Dict[str, AnalyticsRuleV1]): A dictionary of AnalyticsRuleV1 objects. + """ + + resource_path: typing.Final[str] = "/analytics/rules" + + def __init__(self, api_call: ApiCall): + """ + Initialize the AnalyticsRulesV1 object. + + Args: + api_call (ApiCall): The API call object for making requests. 
+ """ + self.api_call = api_call + self.rules: typing.Dict[str, AnalyticsRuleV1] = {} + + def __getitem__(self, rule_id: str) -> AnalyticsRuleV1: + """ + Get or create an AnalyticsRuleV1 object for a given rule_id. + + Args: + rule_id (str): The ID of the analytics rule. + + Returns: + AnalyticsRuleV1: The AnalyticsRuleV1 object for the given ID. + """ + if not self.rules.get(rule_id): + self.rules[rule_id] = AnalyticsRuleV1(self.api_call, rule_id) + return self.rules[rule_id] + + def create( + self, + rule: typing.Union[RuleCreateSchemaForCounters, RuleCreateSchemaForQueries], + rule_parameters: _RuleParams = None, + ) -> typing.Union[RuleSchemaForCounters, RuleSchemaForQueries]: + """ + Create a new analytics rule. + + This method can create both counter rules and query rules. + + Args: + rule (Union[RuleCreateSchemaForCounters, RuleCreateSchemaForQueries]): + The rule schema. Use RuleCreateSchemaForCounters for counter rules + and RuleCreateSchemaForQueries for query rules. + + rule_parameters (_RuleParams, optional): Additional rule parameters. + + Returns: + Union[RuleSchemaForCounters, RuleSchemaForQueries]: + The created rule. Returns RuleSchemaForCounters for counter rules + and RuleSchemaForQueries for query rules. + """ + response: typing.Union[RuleSchemaForCounters, RuleSchemaForQueries] = ( + self.api_call.post( + AnalyticsRulesV1.resource_path, + body=rule, + params=rule_parameters, + as_json=True, + entity_type=typing.Union[ + RuleSchemaForCounters, + RuleSchemaForQueries, + ], + ) + ) + return response + + def upsert( + self, + rule_id: str, + rule: typing.Union[RuleCreateSchemaForQueries, RuleSchemaForCounters], + ) -> typing.Union[RuleSchemaForCounters, RuleCreateSchemaForQueries]: + """ + Create or update an analytics rule. + + Args: + rule_id (str): The ID of the rule to upsert. + rule (Union[RuleCreateSchemaForQueries, RuleSchemaForCounters]): The rule schema. 
+ + Returns: + Union[RuleSchemaForCounters, RuleCreateSchemaForQueries]: The upserted rule. + """ + response = self.api_call.put( + "/".join([AnalyticsRulesV1.resource_path, rule_id]), + body=rule, + entity_type=typing.Union[RuleSchemaForQueries, RuleSchemaForCounters], + ) + return typing.cast( + typing.Union[RuleSchemaForCounters, RuleCreateSchemaForQueries], + response, + ) + + def retrieve(self) -> RulesRetrieveSchema: + """ + Retrieve all analytics rules. + + Returns: + RulesRetrieveSchema: The schema containing all analytics rules. + """ + response: RulesRetrieveSchema = self.api_call.get( + AnalyticsRulesV1.resource_path, + as_json=True, + entity_type=RulesRetrieveSchema, + ) + return response + + diff --git a/src/typesense/analytics_v1.py b/src/typesense/analytics_v1.py new file mode 100644 index 0000000..b75bfbb --- /dev/null +++ b/src/typesense/analytics_v1.py @@ -0,0 +1,44 @@ +""" +This module provides functionality for managing analytics (V1) in Typesense. + +Classes: + - AnalyticsV1: Handles operations related to analytics, including access to analytics rules. + +Methods: + - __init__: Initializes the AnalyticsV1 object. + +The AnalyticsV1 class serves as an entry point for analytics-related operations in Typesense, +currently providing access to AnalyticsRulesV1. + +For more information on analytics, refer to the Analytics & Query Suggestion +[documentation](https://typesense.org/docs/27.0/api/analytics-query-suggestions.html) + +This module uses type hinting and is compatible with Python 3.11+ as well as earlier +versions through the use of the typing_extensions library. +""" + +from typesense.analytics_rules_v1 import AnalyticsRulesV1 +from typesense.api_call import ApiCall + + +class AnalyticsV1(object): + """ + Class for managing analytics in Typesense (V1). + + This class provides access to analytics-related functionalities, + currently including operations on analytics rules. 
+ + Attributes: + rules (AnalyticsRulesV1): An instance of AnalyticsRulesV1 for managing analytics rules. + """ + + def __init__(self, api_call: ApiCall) -> None: + """ + Initialize the AnalyticsV1 object. + + Args: + api_call (ApiCall): The API call object for making requests. + """ + self.rules = AnalyticsRulesV1(api_call) + + diff --git a/src/typesense/client.py b/src/typesense/client.py index f60acd0..d5d7dee 100644 --- a/src/typesense/client.py +++ b/src/typesense/client.py @@ -36,6 +36,7 @@ import typing_extensions as typing from typesense.aliases import Aliases +from typesense.analytics_v1 import AnalyticsV1 from typesense.analytics import Analytics from typesense.api_call import ApiCall from typesense.collection import Collection @@ -70,7 +71,8 @@ class Client: multi_search (MultiSearch): Instance for performing multi-search operations. keys (Keys): Instance for managing API keys. aliases (Aliases): Instance for managing collection aliases. - analytics (Analytics): Instance for analytics operations. + analyticsV1 (AnalyticsV1): Instance for analytics operations (V1). + analytics (AnalyticsV30): Instance for analytics operations (v30). stemming (Stemming): Instance for stemming dictionary operations. operations (Operations): Instance for various Typesense operations. debug (Debug): Instance for debug operations. 
@@ -101,6 +103,7 @@ def __init__(self, config_dict: ConfigDict) -> None: self.multi_search = MultiSearch(self.api_call) self.keys = Keys(self.api_call) self.aliases = Aliases(self.api_call) + self.analyticsV1 = AnalyticsV1(self.api_call) self.analytics = Analytics(self.api_call) self.stemming = Stemming(self.api_call) self.operations = Operations(self.api_call) diff --git a/src/typesense/types/analytics.py b/src/typesense/types/analytics.py new file mode 100644 index 0000000..540c8b4 --- /dev/null +++ b/src/typesense/types/analytics.py @@ -0,0 +1,84 @@ +"""Types for Analytics endpoints and Analytics Rules.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class AnalyticsEvent(typing.TypedDict): + """Schema for an analytics event to be created.""" + + name: str + event_type: str + data: typing.Dict[str, typing.Any] + + +class AnalyticsEventCreateResponse(typing.TypedDict): + """Response schema for creating an analytics event and for flush.""" + + ok: bool + + +class _AnalyticsEventItem(typing.TypedDict, total=False): + name: str + event_type: str + collection: str + timestamp: int + user_id: str + doc_id: str + doc_ids: typing.List[str] + query: str + + +class AnalyticsEventsResponse(typing.TypedDict): + """Response schema for retrieving analytics events.""" + + events: typing.List[_AnalyticsEventItem] + + +class AnalyticsStatus(typing.TypedDict, total=False): + """Response schema for analytics status.""" + + popular_prefix_queries: int + nohits_prefix_queries: int + log_prefix_queries: int + query_log_events: int + query_counter_events: int + doc_log_events: int + doc_counter_events: int + + +# Rules + +class AnalyticsRuleParams(typing.TypedDict, total=False): + destination_collection: str + limit: int + capture_search_requests: bool + meta_fields: typing.List[str] + expand_query: bool + counter_field: str + weight: int + + +class AnalyticsRuleCreate(typing.TypedDict): + name: str + type: str + 
collection: str + event_type: str + params: AnalyticsRuleParams + rule_tag: typing.NotRequired[str] + + +class AnalyticsRuleUpdate(typing.TypedDict, total=False): + name: str + rule_tag: str + params: AnalyticsRuleParams + + +class AnalyticsRule(AnalyticsRuleCreate, total=False): + pass + + diff --git a/src/typesense/types/analytics_rule.py b/src/typesense/types/analytics_rule_v1.py similarity index 98% rename from src/typesense/types/analytics_rule.py rename to src/typesense/types/analytics_rule_v1.py index af261bc..3f76046 100644 --- a/src/typesense/types/analytics_rule.py +++ b/src/typesense/types/analytics_rule_v1.py @@ -1,4 +1,4 @@ -"""Analytics Rule types for Typesense Python Client.""" +"""Analytics Rule V1 types for Typesense Python Client.""" import sys @@ -201,3 +201,5 @@ class RulesRetrieveSchema(typing.TypedDict): """ rules: typing.List[typing.Union[RuleSchemaForQueries, RuleSchemaForCounters]] + + diff --git a/src/typesense/types/collection.py b/src/typesense/types/collection.py index 9e8a397..2cb0d28 100644 --- a/src/typesense/types/collection.py +++ b/src/typesense/types/collection.py @@ -180,6 +180,7 @@ class CollectionCreateSchema(typing.TypedDict): token_separators: typing.NotRequired[typing.List[str]] enable_nested_fields: typing.NotRequired[bool] voice_query_model: typing.NotRequired[VoiceQueryModelSchema] + synonym_sets: typing.NotRequired[typing.List[typing.List[str]]] class CollectionSchema(CollectionCreateSchema): diff --git a/tests/analytics_events_test.py b/tests/analytics_events_test.py new file mode 100644 index 0000000..81af690 --- /dev/null +++ b/tests/analytics_events_test.py @@ -0,0 +1,140 @@ +"""Tests for Analytics events endpoints (client.analytics.events).""" +from __future__ import annotations + +import pytest + +from tests.utils.version import is_v30_or_above +from typesense.client import Client +import requests_mock + +from typesense.types.analytics import AnalyticsEvent + + +pytestmark = pytest.mark.skipif( + not 
is_v30_or_above( + Client({ + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + }) + ), + reason="Run analytics events tests only on v30+", +) + + +def test_actual_create_event(actual_client: Client, delete_all: None, create_collection: None, delete_all_analytics_rules: None) -> None: + actual_client.analytics.rules.create( + { + "name": "company_analytics_rule", + "type": "log", + "collection": "companies", + "event_type": "click", + "params": {}, + } + ) + event: AnalyticsEvent = { + "name": "company_analytics_rule", + "event_type": "query", + "data": { + "user_id": "user-1", + "doc_id": "apple", + }, + } + resp = actual_client.analytics.events.create(event) + assert resp["ok"] is True + actual_client.analytics.rules["company_analytics_rule"].delete() + + +def test_create_event(fake_client: Client) -> None: + event: AnalyticsEvent = { + "name": "company_analytics_rule", + "event_type": "query", + "data": {"user_id": "user-1", "q": "apple"}, + } + with requests_mock.Mocker() as mock: + mock.post("http://nearest:8108/analytics/events", json={"ok": True}) + resp = fake_client.analytics.events.create(event) + assert resp["ok"] is True + + +def test_status(actual_client: Client, delete_all: None) -> None: + status = actual_client.analytics.events.status() + assert isinstance(status, dict) + + +def test_retrieve_events(actual_client: Client, delete_all: None, delete_all_analytics_rules: None) -> None: + actual_client.analytics.rules.create( + { + "name": "company_analytics_rule", + "type": "log", + "collection": "companies", + "event_type": "click", + "params": {}, + } + ) + event: AnalyticsEvent = { + "name": "company_analytics_rule", + "event_type": "query", + "data": { + "user_id": "user-1", + "doc_id": "apple", + }, + } + resp = actual_client.analytics.events.create(event) + assert resp["ok"] is True + result = actual_client.analytics.events.retrieve( + user_id="user-1", + 
name="company_analytics_rule", + n=10, + ) + assert "events" in result + + + +def test_retrieve_events(fake_client: Client) -> None: + with requests_mock.Mocker() as mock: + mock.get( + "http://nearest:8108/analytics/events", + json={"events": [{"name": "company_analytics_rule"}]}, + ) + result = fake_client.analytics.events.retrieve( + user_id="user-1", name="company_analytics_rule", n=10 + ) + assert "events" in result + +def test_acutal_retrieve_events(actual_client: Client, delete_all: None, create_collection: None, delete_all_analytics_rules: None) -> None: + actual_client.analytics.rules.create( + { + "name": "company_analytics_rule", + "type": "log", + "collection": "companies", + "event_type": "click", + "params": {}, + } + ) + event: AnalyticsEvent = { + "name": "company_analytics_rule", + "event_type": "query", + "data": { + "user_id": "user-1", + "doc_id": "apple", + }, + } + resp = actual_client.analytics.events.create(event) + assert resp["ok"] is True + result = actual_client.analytics.events.retrieve( + user_id="user-1", name="company_analytics_rule", n=10 + ) + assert "events" in result + +def test_acutal_flush(actual_client: Client, delete_all: None) -> None: + resp = actual_client.analytics.events.flush() + assert resp["ok"] in [True, False] + + +def test_flush(fake_client: Client) -> None: + with requests_mock.Mocker() as mock: + mock.post("http://nearest:8108/analytics/flush", json={"ok": True}) + resp = fake_client.analytics.events.flush() + assert resp["ok"] is True + + diff --git a/tests/analytics_rule_test.py b/tests/analytics_rule_test.py index 4141c55..68b9122 100644 --- a/tests/analytics_rule_test.py +++ b/tests/analytics_rule_test.py @@ -1,120 +1,67 @@ -"""Tests for the AnalyticsRule class.""" - +"""Unit tests for per-rule AnalyticsRule operations.""" from __future__ import annotations +import pytest import requests_mock -from 
tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from tests.utils.version import is_v30_or_above +from typesense.client import Client from typesense.analytics_rule import AnalyticsRule from typesense.analytics_rules import AnalyticsRules -from typesense.api_call import ApiCall -from typesense.types.analytics_rule import RuleDeleteSchema, RuleSchemaForQueries - - -def test_init(fake_api_call: ApiCall) -> None: - """Test that the AnalyticsRule object is initialized correctly.""" - analytics_rule = AnalyticsRule(fake_api_call, "company_analytics_rule") - assert analytics_rule.rule_id == "company_analytics_rule" - assert_match_object(analytics_rule.api_call, fake_api_call) - assert_object_lists_match( - analytics_rule.api_call.node_manager.nodes, - fake_api_call.node_manager.nodes, - ) - assert_match_object( - analytics_rule.api_call.config.nearest_node, - fake_api_call.config.nearest_node, - ) - assert ( - analytics_rule._endpoint_path # noqa: WPS437 - == "/analytics/rules/company_analytics_rule" - ) +pytestmark = pytest.mark.skipif( + not is_v30_or_above( + Client({ + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + }) + ), + reason="Run analytics tests only on v30+", +) -def test_retrieve(fake_analytics_rule: AnalyticsRule) -> None: - """Test that the AnalyticsRule object can retrieve an analytics_rule.""" - json_response: RuleSchemaForQueries = { - "name": "company_analytics_rule", - "params": { - "destination": { - "collection": "companies_queries", - }, - "source": {"collections": ["companies"]}, - }, - "type": "nohits_queries", - } +def test_rule_retrieve(fake_api_call) -> None: + rule = AnalyticsRule(fake_api_call, "company_analytics_rule") + expected = {"name": "company_analytics_rule"} with requests_mock.Mocker() as mock: mock.get( - "/analytics/rules/company_analytics_rule", - json=json_response, - ) - - response = fake_analytics_rule.retrieve() - - assert len(mock.request_history) 
== 1 - assert mock.request_history[0].method == "GET" - assert ( - mock.request_history[0].url - == "http://nearest:8108/analytics/rules/company_analytics_rule" + "http://nearest:8108/analytics/rules/company_analytics_rule", + json=expected, ) - assert response == json_response + resp = rule.retrieve() + assert resp == expected -def test_delete(fake_analytics_rule: AnalyticsRule) -> None: - """Test that the AnalyticsRule object can delete an analytics_rule.""" - json_response: RuleDeleteSchema = { - "name": "company_analytics_rule", - } +def test_rule_delete(fake_api_call) -> None: + rule = AnalyticsRule(fake_api_call, "company_analytics_rule") + expected = {"name": "company_analytics_rule"} with requests_mock.Mocker() as mock: mock.delete( - "/analytics/rules/company_analytics_rule", - json=json_response, + "http://nearest:8108/analytics/rules/company_analytics_rule", + json=expected, ) + resp = rule.delete() + assert resp == expected - response = fake_analytics_rule.delete() - assert len(mock.request_history) == 1 - assert mock.request_history[0].method == "DELETE" - assert ( - mock.request_history[0].url - == "http://nearest:8108/analytics/rules/company_analytics_rule" - ) - assert response == json_response - - -def test_actual_retrieve( +def test_actual_rule_retrieve( actual_analytics_rules: AnalyticsRules, delete_all: None, delete_all_analytics_rules: None, create_analytics_rule: None, ) -> None: - """Test that the AnalyticsRule object can retrieve a rule from Typesense Server.""" - response = actual_analytics_rules["company_analytics_rule"].retrieve() + resp = actual_analytics_rules["company_analytics_rule"].retrieve() + assert resp["name"] == "company_analytics_rule" - expected: RuleSchemaForQueries = { - "name": "company_analytics_rule", - "params": { - "destination": {"collection": 
"companies_queries"}, - "limit": 1000, - "source": {"collections": ["companies"]}, - }, - "type": "nohits_queries", - } - assert response == expected - - -def test_actual_delete( +def test_actual_rule_delete( actual_analytics_rules: AnalyticsRules, delete_all: None, delete_all_analytics_rules: None, create_analytics_rule: None, ) -> None: - """Test that the AnalyticsRule object can delete a rule from Typesense Server.""" - response = actual_analytics_rules["company_analytics_rule"].delete() + resp = actual_analytics_rules["company_analytics_rule"].delete() + assert resp["name"] == "company_analytics_rule" + - expected: RuleDeleteSchema = { - "name": "company_analytics_rule", - } - assert response == expected diff --git a/tests/analytics_rule_v1_test.py b/tests/analytics_rule_v1_test.py new file mode 100644 index 0000000..8cc970b --- /dev/null +++ b/tests/analytics_rule_v1_test.py @@ -0,0 +1,129 @@ +"""Tests for the AnalyticsRuleV1 class.""" +from __future__ import annotations + +import pytest +import requests_mock + +from tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from tests.utils.version import is_v30_or_above +from typesense.client import Client +from typesense.analytics_rule_v1 import AnalyticsRuleV1 +from typesense.analytics_rules_v1 import AnalyticsRulesV1 +from typesense.api_call import ApiCall +from typesense.types.analytics_rule_v1 import RuleDeleteSchema, RuleSchemaForQueries + + +@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") +def test_init(fake_api_call: ApiCall) -> None: + """Test that the AnalyticsRuleV1 object is initialized correctly.""" + analytics_rule = AnalyticsRuleV1(fake_api_call, "company_analytics_rule") + + assert analytics_rule.rule_id == "company_analytics_rule" + assert_match_object(analytics_rule.api_call, fake_api_call) + assert_object_lists_match( + 
analytics_rule.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + analytics_rule.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert ( + analytics_rule._endpoint_path # noqa: WPS437 + == "/analytics/rules/company_analytics_rule" + ) + + +@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") +def test_retrieve(fake_analytics_rule: AnalyticsRuleV1) -> None: + """Test that the AnalyticsRuleV1 object can retrieve an analytics_rule.""" + json_response: RuleSchemaForQueries = { + "name": "company_analytics_rule", + "params": { + "destination": { + "collection": "companies_queries", + }, + "source": {"collections": ["companies"]}, + }, + "type": "nohits_queries", + } + + with requests_mock.Mocker() as mock: + mock.get( + "/analytics/rules/company_analytics_rule", + json=json_response, + ) + + response = fake_analytics_rule.retrieve() + + assert len(mock.request_history) == 1 + assert mock.request_history[0].method == "GET" + assert ( + mock.request_history[0].url + == "http://nearest:8108/analytics/rules/company_analytics_rule" + ) + assert response == json_response + + +@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") +def test_delete(fake_analytics_rule: AnalyticsRuleV1) -> None: + """Test that the AnalyticsRuleV1 object can delete an analytics_rule.""" + json_response: RuleDeleteSchema = { + "name": "company_analytics_rule", + } + with requests_mock.Mocker() as mock: + mock.delete( + "/analytics/rules/company_analytics_rule", + json=json_response, + ) + + response = fake_analytics_rule.delete() + + assert len(mock.request_history) == 1 + assert mock.request_history[0].method == "DELETE" + assert ( + 
mock.request_history[0].url + == "http://nearest:8108/analytics/rules/company_analytics_rule" + ) + assert response == json_response + + +@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") +def test_actual_retrieve( + actual_analytics_rules: AnalyticsRulesV1, + delete_all: None, + delete_all_analytics_rules_v1: None, + create_analytics_rule_v1: None, +) -> None: + """Test that the AnalyticsRuleV1 object can retrieve a rule from Typesense Server.""" + response = actual_analytics_rules["company_analytics_rule"].retrieve() + + expected: RuleSchemaForQueries = { + "name": "company_analytics_rule", + "params": { + "destination": {"collection": "companies_queries"}, + "limit": 1000, + "source": {"collections": ["companies"]}, + }, + "type": "nohits_queries", + } + + assert response == expected + + +@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") +def test_actual_delete( + actual_analytics_rules: AnalyticsRulesV1, + delete_all: None, + delete_all_analytics_rules_v1: None, + create_analytics_rule_v1: None, +) -> None: + """Test that the AnalyticsRuleV1 object can delete a rule from Typesense Server.""" + response = actual_analytics_rules["company_analytics_rule"].delete() + + expected: RuleDeleteSchema = { + "name": "company_analytics_rule", + } + assert response == expected + + diff --git a/tests/analytics_rules_test.py b/tests/analytics_rules_test.py index edad1d8..ef67bb6 100644 --- a/tests/analytics_rules_test.py +++ b/tests/analytics_rules_test.py @@ -1,141 +1,87 @@ -"""Tests for the AnalyticsRules class.""" - +"""Tests for v30 Analytics Rules endpoints (client.analytics.rules).""" from __future__ import annotations +import pytest import requests_mock -from 
tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from tests.utils.version import is_v30_or_above +from typesense.client import Client from typesense.analytics_rules import AnalyticsRules -from typesense.api_call import ApiCall -from typesense.types.analytics_rule import ( - RuleCreateSchemaForQueries, - RulesRetrieveSchema, +from typesense.analytics_rule import AnalyticsRule +from typesense.types.analytics import AnalyticsRuleCreate + + +pytestmark = pytest.mark.skipif( + not is_v30_or_above( + Client({ + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + }) + ), + reason="Run v30 analytics tests only on v30+", ) -def test_init(fake_api_call: ApiCall) -> None: - """Test that the AnalyticsRules object is initialized correctly.""" - analytics_rules = AnalyticsRules(fake_api_call) - - assert_match_object(analytics_rules.api_call, fake_api_call) - assert_object_lists_match( - analytics_rules.api_call.node_manager.nodes, - fake_api_call.node_manager.nodes, - ) - assert_match_object( - analytics_rules.api_call.config.nearest_node, - fake_api_call.config.nearest_node, - ) - - assert not analytics_rules.rules - - -def test_get_missing_analytics_rule(fake_analytics_rules: AnalyticsRules) -> None: - """Test that the AnalyticsRules object can get a missing analytics_rule.""" - analytics_rule = fake_analytics_rules["company_analytics_rule"] - - assert analytics_rule.rule_id == "company_analytics_rule" - assert_match_object(analytics_rule.api_call, fake_analytics_rules.api_call) - assert_object_lists_match( - analytics_rule.api_call.node_manager.nodes, - fake_analytics_rules.api_call.node_manager.nodes, - ) - assert_match_object( - analytics_rule.api_call.config.nearest_node, - fake_analytics_rules.api_call.config.nearest_node, - ) - assert ( - analytics_rule._endpoint_path # noqa: WPS437 - == "/analytics/rules/company_analytics_rule" - ) - - -def test_get_existing_analytics_rule(fake_analytics_rules: 
AnalyticsRules) -> None: - """Test that the AnalyticsRules object can get an existing analytics_rule.""" - analytics_rule = fake_analytics_rules["company_analytics_rule"] - fetched_analytics_rule = fake_analytics_rules["company_analytics_rule"] +def test_rules_init(fake_api_call) -> None: + rules = AnalyticsRules(fake_api_call) + assert rules.rules == {} - assert len(fake_analytics_rules.rules) == 1 - assert analytics_rule is fetched_analytics_rule +def test_rule_getitem(fake_api_call) -> None: + rules = AnalyticsRules(fake_api_call) + rule = rules["company_analytics_rule"] + assert isinstance(rule, AnalyticsRule) + assert rule._endpoint_path == "/analytics/rules/company_analytics_rule" -def test_retrieve(fake_analytics_rules: AnalyticsRules) -> None: - """Test that the AnalyticsRules object can retrieve analytics_rules.""" - json_response: RulesRetrieveSchema = { - "rules": [ - { - "name": "company_analytics_rule", - "params": { - "destination": { - "collection": "companies_queries", - }, - "source": {"collections": ["companies"]}, - }, - "type": "nohits_queries", - }, - ], +def test_rules_create(fake_api_call) -> None: + rules = AnalyticsRules(fake_api_call) + body: AnalyticsRuleCreate = { + "name": "company_analytics_rule", + "type": "popular_queries", + "collection": "companies", + "event_type": "query", + "params": {"destination_collection": "companies_queries", "limit": 1000}, } + with requests_mock.Mocker() as mock: + mock.post("http://nearest:8108/analytics/rules", json=body) + resp = rules.create(body) + assert resp == body + +def test_rules_retrieve_with_tag(fake_api_call) -> None: + rules = AnalyticsRules(fake_api_call) with requests_mock.Mocker() as mock: mock.get( - "http://nearest:8108/analytics/rules", - json=json_response, + "http://nearest:8108/analytics/rules?rule_tag=homepage", + json=[{"name": "rule1", "rule_tag": 
"homepage"}], ) + resp = rules.retrieve(rule_tag="homepage") + assert isinstance(resp, list) + assert resp[0]["rule_tag"] == "homepage" - response = fake_analytics_rules.retrieve() - - assert len(response) == 1 - assert response["rules"][0] == json_response.get("rules")[0] - assert response == json_response +def test_rules_upsert(fake_api_call) -> None: + rules = AnalyticsRules(fake_api_call) + with requests_mock.Mocker() as mock: + mock.put( + "http://nearest:8108/analytics/rules/company_analytics_rule", + json={"name": "company_analytics_rule"}, + ) + resp = rules.upsert("company_analytics_rule", {"params": {}}) + assert resp["name"] == "company_analytics_rule" -def test_create(fake_analytics_rules: AnalyticsRules) -> None: - """Test that the AnalyticsRules object can create a analytics_rule.""" - json_response: RuleCreateSchemaForQueries = { - "name": "company_analytics_rule", - "params": { - "destination": { - "collection": "companies_queries", - }, - "source": {"collections": ["companies"]}, - }, - "type": "nohits_queries", - } +def test_rules_retrieve(fake_api_call) -> None: + rules = AnalyticsRules(fake_api_call) with requests_mock.Mocker() as mock: - mock.post( + mock.get( "http://nearest:8108/analytics/rules", - json=json_response, + json=[{"name": "company_analytics_rule"}], ) - - fake_analytics_rules.create( - rule={ - "params": { - "destination": { - "collection": "companies_queries", - }, - "source": {"collections": ["companies"]}, - }, - "type": "nohits_queries", - "name": "company_analytics_rule", - }, - ) - - assert mock.call_count == 1 - assert mock.called is True - assert mock.last_request.method == "POST" - assert mock.last_request.url == "http://nearest:8108/analytics/rules" - assert mock.last_request.json() == { - "params": { - "destination": { - "collection": "companies_queries", - }, - "source": {"collections": 
["companies"]}, - }, - "type": "nohits_queries", - "name": "company_analytics_rule", - } + resp = rules.retrieve() + assert isinstance(resp, list) + assert resp[0]["name"] == "company_analytics_rule" def test_actual_create( @@ -145,28 +91,16 @@ def test_actual_create( create_collection: None, create_query_collection: None, ) -> None: - """Test that the AnalyticsRules object can create an analytics_rule on Typesense Server.""" - response = actual_analytics_rules.create( - rule={ - "name": "company_analytics_rule", - "type": "nohits_queries", - "params": { - "source": { - "collections": ["companies"], - }, - "destination": {"collection": "companies_queries"}, - }, - }, - ) - - assert response == { + body: AnalyticsRuleCreate = { "name": "company_analytics_rule", "type": "nohits_queries", - "params": { - "source": {"collections": ["companies"]}, - "destination": {"collection": "companies_queries"}, - }, + "collection": "companies", + "event_type": "query", + "params": {"destination_collection": "companies_queries", "limit": 1000}, } + resp = actual_analytics_rules.create(rule=body) + assert resp["name"] == "company_analytics_rule" + assert resp["params"]["destination_collection"] == "companies_queries" def test_actual_update( @@ -175,28 +109,16 @@ def test_actual_update( delete_all_analytics_rules: None, create_analytics_rule: None, ) -> None: - """Test that the AnalyticsRules object can update an analytics_rule on Typesense Server.""" - response = actual_analytics_rules.upsert( + resp = actual_analytics_rules.upsert( "company_analytics_rule", { - "type": "popular_queries", "params": { - "source": { - "collections": ["companies"], - }, - "destination": {"collection": "companies_queries"}, + "destination_collection": "companies_queries", + "limit": 500, }, }, ) - - assert response == { - "name": "company_analytics_rule", - "type": "popular_queries", - "params": { - "source": {"collections": ["companies"]}, - "destination": {"collection": "companies_queries"}, - }, - } 
+ assert resp["name"] == "company_analytics_rule" def test_actual_retrieve( @@ -205,18 +127,8 @@ def test_actual_retrieve( delete_all_analytics_rules: None, create_analytics_rule: None, ) -> None: - """Test that the AnalyticsRules object can retrieve the rules from Typesense Server.""" - response = actual_analytics_rules.retrieve() - assert len(response["rules"]) == 1 - assert_match_object( - response["rules"][0], - { - "name": "company_analytics_rule", - "params": { - "destination": {"collection": "companies_queries"}, - "limit": 1000, - "source": {"collections": ["companies"]}, - }, - "type": "nohits_queries", - }, - ) + rules = actual_analytics_rules.retrieve() + assert isinstance(rules, list) + assert any(r.get("name") == "company_analytics_rule" for r in rules) + + diff --git a/tests/analytics_rules_v1_test.py b/tests/analytics_rules_v1_test.py new file mode 100644 index 0000000..674ac34 --- /dev/null +++ b/tests/analytics_rules_v1_test.py @@ -0,0 +1,234 @@ +"""Tests for the AnalyticsRulesV1 class.""" +from __future__ import annotations + +import pytest +import requests_mock + +from tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from tests.utils.version import is_v30_or_above +from typesense.client import Client +from typesense.analytics_rules_v1 import AnalyticsRulesV1 +from typesense.api_call import ApiCall +from typesense.types.analytics_rule_v1 import ( + RuleCreateSchemaForQueries, + RulesRetrieveSchema, +) + + +@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") +def test_init(fake_api_call: ApiCall) -> None: + """Test that the AnalyticsRulesV1 object is initialized correctly.""" + analytics_rules = AnalyticsRulesV1(fake_api_call) + + assert_match_object(analytics_rules.api_call, fake_api_call) + assert_object_lists_match( + analytics_rules.api_call.node_manager.nodes, + 
fake_api_call.node_manager.nodes, + ) + assert_match_object( + analytics_rules.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + + assert not analytics_rules.rules + + +@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") +def test_get_missing_analytics_rule(fake_analytics_rules: AnalyticsRulesV1) -> None: + """Test that the AnalyticsRulesV1 object can get a missing analytics_rule.""" + analytics_rule = fake_analytics_rules["company_analytics_rule"] + + assert analytics_rule.rule_id == "company_analytics_rule" + assert_match_object(analytics_rule.api_call, fake_analytics_rules.api_call) + assert_object_lists_match( + analytics_rule.api_call.node_manager.nodes, + fake_analytics_rules.api_call.node_manager.nodes, + ) + assert_match_object( + analytics_rule.api_call.config.nearest_node, + fake_analytics_rules.api_call.config.nearest_node, + ) + assert ( + analytics_rule._endpoint_path # noqa: WPS437 + == "/analytics/rules/company_analytics_rule" + ) + + +@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") +def test_get_existing_analytics_rule(fake_analytics_rules: AnalyticsRulesV1) -> None: + """Test that the AnalyticsRulesV1 object can get an existing analytics_rule.""" + analytics_rule = fake_analytics_rules["company_analytics_rule"] + fetched_analytics_rule = fake_analytics_rules["company_analytics_rule"] + + assert len(fake_analytics_rules.rules) == 1 + + assert analytics_rule is fetched_analytics_rule + + +@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") +def test_retrieve(fake_analytics_rules: AnalyticsRulesV1) -> None: + """Test that the AnalyticsRulesV1 object can retrieve 
analytics_rules.""" + json_response: RulesRetrieveSchema = { + "rules": [ + { + "name": "company_analytics_rule", + "params": { + "destination": { + "collection": "companies_queries", + }, + "source": {"collections": ["companies"]}, + }, + "type": "nohits_queries", + }, + ], + } + + with requests_mock.Mocker() as mock: + mock.get( + "http://nearest:8108/analytics/rules", + json=json_response, + ) + + response = fake_analytics_rules.retrieve() + + assert len(response) == 1 + assert response["rules"][0] == json_response.get("rules")[0] + assert response == json_response + + +@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") +def test_create(fake_analytics_rules: AnalyticsRulesV1) -> None: + """Test that the AnalyticsRulesV1 object can create a analytics_rule.""" + json_response: RuleCreateSchemaForQueries = { + "name": "company_analytics_rule", + "params": { + "destination": { + "collection": "companies_queries", + }, + "source": {"collections": ["companies"]}, + }, + "type": "nohits_queries", + } + + with requests_mock.Mocker() as mock: + mock.post( + "http://nearest:8108/analytics/rules", + json=json_response, + ) + + fake_analytics_rules.create( + rule={ + "params": { + "destination": { + "collection": "companies_queries", + }, + "source": {"collections": ["companies"]}, + }, + "type": "nohits_queries", + "name": "company_analytics_rule", + }, + ) + + assert mock.call_count == 1 + assert mock.called is True + assert mock.last_request.method == "POST" + assert mock.last_request.url == "http://nearest:8108/analytics/rules" + assert mock.last_request.json() == { + "params": { + "destination": { + "collection": "companies_queries", + }, + "source": {"collections": ["companies"]}, + }, + "type": "nohits_queries", + 
"name": "company_analytics_rule", + } + + +@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") +def test_actual_create( + actual_analytics_rules: AnalyticsRulesV1, + delete_all: None, + delete_all_analytics_rules_v1: None, + create_collection: None, + create_query_collection: None, +) -> None: + """Test that the AnalyticsRulesV1 object can create an analytics_rule on Typesense Server.""" + response = actual_analytics_rules.create( + rule={ + "name": "company_analytics_rule", + "type": "nohits_queries", + "params": { + "source": { + "collections": ["companies"], + }, + "destination": {"collection": "companies_queries"}, + }, + }, + ) + + assert response == { + "name": "company_analytics_rule", + "type": "nohits_queries", + "params": { + "source": {"collections": ["companies"]}, + "destination": {"collection": "companies_queries"}, + }, + } + + +@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") +def test_actual_update( + actual_analytics_rules: AnalyticsRulesV1, + delete_all: None, + delete_all_analytics_rules_v1: None, + create_analytics_rule_v1: None, +) -> None: + """Test that the AnalyticsRulesV1 object can update an analytics_rule on Typesense Server.""" + response = actual_analytics_rules.upsert( + "company_analytics_rule", + { + "type": "popular_queries", + "params": { + "source": { + "collections": ["companies"], + }, + "destination": {"collection": "companies_queries"}, + }, + }, + ) + + assert response == { + "name": "company_analytics_rule", + "type": "popular_queries", + "params": { + "source": {"collections": ["companies"]}, + "destination": {"collection": "companies_queries"}, + }, + } + + +@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": 
"http"}]})), reason="Skip AnalyticsV1 tests on v30+") +def test_actual_retrieve( + actual_analytics_rules: AnalyticsRulesV1, + delete_all: None, + delete_all_analytics_rules_v1: None, + create_analytics_rule_v1: None, +) -> None: + """Test that the AnalyticsRulesV1 object can retrieve the rules from Typesense Server.""" + response = actual_analytics_rules.retrieve() + assert len(response["rules"]) == 1 + assert_match_object( + response["rules"][0], + { + "name": "company_analytics_rule", + "params": { + "destination": {"collection": "companies_queries"}, + "limit": 1000, + "source": {"collections": ["companies"]}, + }, + "type": "nohits_queries", + }, + ) + + diff --git a/tests/analytics_test.py b/tests/analytics_test.py index e2e4441..5d9e56d 100644 --- a/tests/analytics_test.py +++ b/tests/analytics_test.py @@ -1,12 +1,15 @@ -"""Tests for the Analytics class.""" - +"""Tests for the AnalyticsV1 class.""" +import pytest +from tests.utils.version import is_v30_or_above +from typesense.client import Client from tests.utils.object_assertions import assert_match_object, assert_object_lists_match from typesense.analytics import Analytics from typesense.api_call import ApiCall +@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") def test_init(fake_api_call: ApiCall) -> None: - """Test that the Analytics object is initialized correctly.""" + """Test that the AnalyticsV1 object is initialized correctly.""" analytics = Analytics(fake_api_call) assert_match_object(analytics.rules.api_call, fake_api_call) diff --git a/tests/analytics_v1_test.py b/tests/analytics_v1_test.py new file mode 100644 index 0000000..50b9339 --- /dev/null +++ b/tests/analytics_v1_test.py @@ -0,0 +1,27 @@ +"""Tests for the AnalyticsV1 class.""" +import pytest +from tests.utils.version import is_v30_or_above +from typesense.client import Client +from tests.utils.object_assertions 
import assert_match_object, assert_object_lists_match +from typesense.analytics_v1 import AnalyticsV1 +from typesense.api_call import ApiCall + + +@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") +def test_init(fake_api_call: ApiCall) -> None: + """Test that the AnalyticsV1 object is initialized correctly.""" + analytics = AnalyticsV1(fake_api_call) + + assert_match_object(analytics.rules.api_call, fake_api_call) + assert_object_lists_match( + analytics.rules.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + analytics.rules.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + + assert not analytics.rules.rules + + diff --git a/tests/client_test.py b/tests/client_test.py index b25f9e9..3997939 100644 --- a/tests/client_test.py +++ b/tests/client_test.py @@ -27,9 +27,9 @@ def test_client_init(fake_config_dict: ConfigDict) -> None: assert fake_client.keys.keys is not None assert fake_client.aliases assert fake_client.aliases.aliases is not None - assert fake_client.analytics - assert fake_client.analytics.rules - assert fake_client.analytics.rules.rules is not None + assert fake_client.analyticsV1 + assert fake_client.analyticsV1.rules + assert fake_client.analyticsV1.rules.rules is not None assert fake_client.operations assert fake_client.debug diff --git a/tests/collection_test.py b/tests/collection_test.py index 33c7837..49e6422 100644 --- a/tests/collection_test.py +++ b/tests/collection_test.py @@ -218,6 +218,7 @@ def test_actual_retrieve( "num_documents": 0, "symbols_to_index": [], "token_separators": [], + "synonym_sets": [] } response.pop("created_at") diff --git a/tests/collections_test.py b/tests/collections_test.py index 84971bd..a68b468 100644 --- a/tests/collections_test.py +++ b/tests/collections_test.py @@ -86,6 +86,7 @@ def test_retrieve(fake_collections: 
Collections) -> None: "num_documents": 0, "symbols_to_index": [], "token_separators": [], + "synonym_sets": [] }, { "created_at": 1619711488, @@ -105,6 +106,7 @@ def test_retrieve(fake_collections: Collections) -> None: "num_documents": 0, "symbols_to_index": [], "token_separators": [], + "synonym_sets": [] }, ] with requests_mock.Mocker() as mock: @@ -138,6 +140,7 @@ def test_create(fake_collections: Collections) -> None: "num_documents": 0, "symbols_to_index": [], "token_separators": [], + "synonym_sets": [] } with requests_mock.Mocker() as mock: @@ -220,6 +223,7 @@ def test_actual_create(actual_collections: Collections, delete_all: None) -> Non "num_documents": 0, "symbols_to_index": [], "token_separators": [], + "synonym_sets": [] } response = actual_collections.create( @@ -288,6 +292,7 @@ def test_actual_retrieve( "num_documents": 0, "symbols_to_index": [], "token_separators": [], + "synonym_sets": [] }, ] diff --git a/tests/fixtures/analytics_rule_fixtures.py b/tests/fixtures/analytics_fixtures.py similarity index 75% rename from tests/fixtures/analytics_rule_fixtures.py rename to tests/fixtures/analytics_fixtures.py index 2f92008..d0f7715 100644 --- a/tests/fixtures/analytics_rule_fixtures.py +++ b/tests/fixtures/analytics_fixtures.py @@ -1,4 +1,4 @@ -"""Fixtures for the Analytics Rules tests.""" +"""Fixtures for Analytics (current) tests.""" import pytest import requests @@ -10,19 +10,18 @@ @pytest.fixture(scope="function", name="delete_all_analytics_rules") def clear_typesense_analytics_rules() -> None: - """Remove all analytics_rules from the Typesense server.""" + """Remove all analytics rules from the Typesense server.""" url = "http://localhost:8108/analytics/rules" headers = {"X-TYPESENSE-API-KEY": "xyz"} - # Get the list of rules response = requests.get(url, headers=headers, timeout=3) response.raise_for_status() - analytics_rules = response.json() + rules = response.json() - # Delete each analytics_rule - 
for analytics_rule_set in analytics_rules["rules"]: - analytics_rule_id = analytics_rule_set.get("name") - delete_url = f"{url}/{analytics_rule_id}" + # v30 returns a list of rule objects + for rule in rules: + rule_name = rule.get("name") + delete_url = f"{url}/{rule_name}" delete_response = requests.delete(delete_url, headers=headers, timeout=3) delete_response.raise_for_status() @@ -32,17 +31,17 @@ def create_analytics_rule_fixture( create_collection: None, create_query_collection: None, ) -> None: - """Create a collection in the Typesense server.""" + """Create an analytics rule in the Typesense server.""" url = "http://localhost:8108/analytics/rules" headers = {"X-TYPESENSE-API-KEY": "xyz"} analytics_rule_data = { "name": "company_analytics_rule", "type": "nohits_queries", + "collection": "companies", + "event_type": "query", "params": { - "source": { - "collections": ["companies"], - }, - "destination": {"collection": "companies_queries"}, + "destination_collection": "companies_queries", + "limit": 1000, }, } @@ -52,22 +51,21 @@ def create_analytics_rule_fixture( @pytest.fixture(scope="function", name="fake_analytics_rules") def fake_analytics_rules_fixture(fake_api_call: ApiCall) -> AnalyticsRules: - """Return a AnalyticsRule object with test values.""" + """Return an AnalyticsRules object with test values.""" return AnalyticsRules(fake_api_call) @pytest.fixture(scope="function", name="actual_analytics_rules") def actual_analytics_rules_fixture(actual_api_call: ApiCall) -> AnalyticsRules: - """Return a AnalyticsRules object using a real API.""" + """Return an AnalyticsRules object using a real API.""" return AnalyticsRules(actual_api_call) @pytest.fixture(scope="function", name="fake_analytics_rule") def fake_analytics_rule_fixture(fake_api_call: ApiCall) -> AnalyticsRule: - """Return a AnalyticsRule object with test values.""" + """Return an AnalyticsRule object with test values.""" return 
AnalyticsRule(fake_api_call, "company_analytics_rule") - @pytest.fixture(scope="function", name="create_query_collection") def create_query_collection_fixture() -> None: """Create a query collection for analytics rules in the Typesense server.""" @@ -93,4 +91,4 @@ def create_query_collection_fixture() -> None: json=query_collection_data, timeout=3, ) - response.raise_for_status() + response.raise_for_status() \ No newline at end of file diff --git a/tests/fixtures/analytics_rule_v1_fixtures.py b/tests/fixtures/analytics_rule_v1_fixtures.py new file mode 100644 index 0000000..44994eb --- /dev/null +++ b/tests/fixtures/analytics_rule_v1_fixtures.py @@ -0,0 +1,70 @@ +"""Fixtures for the Analytics Rules V1 tests.""" + +import pytest +import requests + +from typesense.analytics_rule_v1 import AnalyticsRuleV1 +from typesense.analytics_rules_v1 import AnalyticsRulesV1 +from typesense.api_call import ApiCall + + +@pytest.fixture(scope="function", name="delete_all_analytics_rules_v1") +def clear_typesense_analytics_rules_v1() -> None: + """Remove all analytics_rules from the Typesense server.""" + url = "http://localhost:8108/analytics/rules" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + # Get the list of rules + response = requests.get(url, headers=headers, timeout=3) + response.raise_for_status() + analytics_rules = response.json() + + # Delete each analytics_rule + for analytics_rule_set in analytics_rules["rules"]: + analytics_rule_id = analytics_rule_set.get("name") + delete_url = f"{url}/{analytics_rule_id}" + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + delete_response.raise_for_status() + + +@pytest.fixture(scope="function", name="create_analytics_rule_v1") +def create_analytics_rule_v1_fixture( + create_collection: None, + create_query_collection: None, +) -> None: + """Create a collection in the Typesense server.""" + url = 
"http://localhost:8108/analytics/rules" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + analytics_rule_data = { + "name": "company_analytics_rule", + "type": "nohits_queries", + "params": { + "source": { + "collections": ["companies"], + }, + "destination": {"collection": "companies_queries"}, + }, + } + + response = requests.post(url, headers=headers, json=analytics_rule_data, timeout=3) + response.raise_for_status() + + +@pytest.fixture(scope="function", name="fake_analytics_rules_v1") +def fake_analytics_rules_v1_fixture(fake_api_call: ApiCall) -> AnalyticsRulesV1: + """Return a AnalyticsRule object with test values.""" + return AnalyticsRulesV1(fake_api_call) + + +@pytest.fixture(scope="function", name="actual_analytics_rules_v1") +def actual_analytics_rules_v1_fixture(actual_api_call: ApiCall) -> AnalyticsRulesV1: + """Return a AnalyticsRules object using a real API.""" + return AnalyticsRulesV1(actual_api_call) + + +@pytest.fixture(scope="function", name="fake_analytics_rule_v1") +def fake_analytics_rule_v1_fixture(fake_api_call: ApiCall) -> AnalyticsRuleV1: + """Return a AnalyticsRule object with test values.""" + return AnalyticsRuleV1(fake_api_call, "company_analytics_rule") + + diff --git a/tests/import_test.py b/tests/import_test.py index 616ec11..b33bb39 100644 --- a/tests/import_test.py +++ b/tests/import_test.py @@ -10,7 +10,7 @@ typing_module_names = [ "alias", - "analytics_rule", + "analytics_rule_v1", "collection", "conversations_model", "debug", @@ -25,8 +25,8 @@ module_names = [ "aliases", - "analytics_rule", - "analytics_rules", + "analytics_rule_v1", + "analytics_rules_v1", "api_call", "client", "collection", diff --git a/tests/synonym_test.py b/tests/synonym_test.py index 98caa08..d25d937 100644 --- a/tests/synonym_test.py +++ b/tests/synonym_test.py @@ -2,6 +2,7 @@ from __future__ import annotations +import pytest import requests_mock from tests.utils.object_assertions import ( @@ -9,12 +10,29 @@ 
assert_object_lists_match, assert_to_contain_object, ) +from tests.utils.version import is_v30_or_above from typesense.api_call import ApiCall from typesense.collections import Collections +from typesense.client import Client from typesense.synonym import Synonym, SynonymDeleteSchema from typesense.synonyms import SynonymSchema +pytestmark = pytest.mark.skipif( + is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": [ + {"host": "localhost", "port": 8108, "protocol": "http"} + ], + } + ) + ), + reason="Skip synonym tests on v30+", +) + + def test_init(fake_api_call: ApiCall) -> None: """Test that the Synonym object is initialized correctly.""" synonym = Synonym(fake_api_call, "companies", "company_synonym") diff --git a/tests/synonyms_test.py b/tests/synonyms_test.py index 2071dbc..81ae716 100644 --- a/tests/synonyms_test.py +++ b/tests/synonyms_test.py @@ -2,6 +2,7 @@ from __future__ import annotations +import pytest import requests_mock from tests.utils.object_assertions import ( @@ -11,9 +12,26 @@ ) from typesense.api_call import ApiCall from typesense.collections import Collections +from tests.utils.version import is_v30_or_above +from typesense.client import Client from typesense.synonyms import Synonyms, SynonymSchema, SynonymsRetrieveSchema +pytestmark = pytest.mark.skipif( + is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": [ + {"host": "localhost", "port": 8108, "protocol": "http"} + ], + } + ) + ), + reason="Skip synonyms tests on v30+", +) + + def test_init(fake_api_call: ApiCall) -> None: """Test that the Synonyms object is initialized correctly.""" synonyms = Synonyms(fake_api_call, "companies") diff --git a/tests/utils/version.py b/tests/utils/version.py new file mode 100644 index 0000000..ba3ca93 --- /dev/null +++ b/tests/utils/version.py @@ -0,0 +1,20 @@ +from __future__ import annotations + +from typesense.client import Client + + +def is_v30_or_above(client: Client) -> bool: + try: + debug = client.debug.retrieve() + version = 
debug.get("version") + if version == "nightly": + return True + try: + numbered = str(version).split("v")[1] + return int(numbered) >= 30 + except Exception: + return False + except Exception: + return False + + From 47b4c42711bb9af21196c953610e147911e24cae Mon Sep 17 00:00:00 2001 From: Harisaran G Date: Tue, 26 Aug 2025 16:03:20 +0530 Subject: [PATCH 258/288] add: synonym_set APIs --- src/typesense/analytics_v1.py | 16 ++- src/typesense/client.py | 2 + src/typesense/synonym.py | 14 +++ src/typesense/synonym_set.py | 43 +++++++ src/typesense/synonym_sets.py | 50 ++++++++ src/typesense/synonyms.py | 14 +++ src/typesense/types/synonym_set.py | 72 +++++++++++ tests/analytics_test.py | 2 +- tests/fixtures/synonym_set_fixtures.py | 73 +++++++++++ tests/import_test.py | 3 + tests/synonym_set_test.py | 127 +++++++++++++++++++ tests/synonym_sets_test.py | 163 +++++++++++++++++++++++++ 12 files changed, 577 insertions(+), 2 deletions(-) create mode 100644 src/typesense/synonym_set.py create mode 100644 src/typesense/synonym_sets.py create mode 100644 src/typesense/types/synonym_set.py create mode 100644 tests/fixtures/synonym_set_fixtures.py create mode 100644 tests/synonym_set_test.py create mode 100644 tests/synonym_sets_test.py diff --git a/src/typesense/analytics_v1.py b/src/typesense/analytics_v1.py index b75bfbb..cbacc4b 100644 --- a/src/typesense/analytics_v1.py +++ b/src/typesense/analytics_v1.py @@ -19,6 +19,9 @@ from typesense.analytics_rules_v1 import AnalyticsRulesV1 from typesense.api_call import ApiCall +from typesense.logger import logger + +_analytics_v1_deprecation_warned = False class AnalyticsV1(object): @@ -39,6 +42,17 @@ def __init__(self, api_call: ApiCall) -> None: Args: api_call (ApiCall): The API call object for making requests. 
""" - self.rules = AnalyticsRulesV1(api_call) + self._rules = AnalyticsRulesV1(api_call) + + @property + def rules(self) -> AnalyticsRulesV1: + global _analytics_v1_deprecation_warned + if not _analytics_v1_deprecation_warned: + logger.warning( + "AnalyticsV1 is deprecated and will be removed in a future release. " + "Use client.analytics instead." + ) + _analytics_v1_deprecation_warned = True + return self._rules diff --git a/src/typesense/client.py b/src/typesense/client.py index d5d7dee..92354b2 100644 --- a/src/typesense/client.py +++ b/src/typesense/client.py @@ -51,6 +51,7 @@ from typesense.operations import Operations from typesense.stemming import Stemming from typesense.stopwords import Stopwords +from typesense.synonym_sets import SynonymSets TDoc = typing.TypeVar("TDoc", bound=DocumentSchema) @@ -109,6 +110,7 @@ def __init__(self, config_dict: ConfigDict) -> None: self.operations = Operations(self.api_call) self.debug = Debug(self.api_call) self.stopwords = Stopwords(self.api_call) + self.synonym_sets = SynonymSets(self.api_call) self.metrics = Metrics(self.api_call) self.conversations_models = ConversationsModels(self.api_call) self.nl_search_models = NLSearchModels(self.api_call) diff --git a/src/typesense/synonym.py b/src/typesense/synonym.py index 096affc..4d5b73b 100644 --- a/src/typesense/synonym.py +++ b/src/typesense/synonym.py @@ -22,6 +22,9 @@ """ from typesense.api_call import ApiCall +from typesense.logger import logger + +_synonym_deprecation_warned = False from typesense.types.synonym import SynonymDeleteSchema, SynonymSchema @@ -63,6 +66,7 @@ def retrieve(self) -> SynonymSchema: Returns: SynonymSchema: The schema containing the synonym details. """ + self._maybe_warn_deprecation() return self.api_call.get(self._endpoint_path(), entity_type=SynonymSchema) def delete(self) -> SynonymDeleteSchema: @@ -72,6 +76,7 @@ def delete(self) -> SynonymDeleteSchema: Returns: SynonymDeleteSchema: The schema containing the deletion response. 
""" + self._maybe_warn_deprecation() return self.api_call.delete( self._endpoint_path(), entity_type=SynonymDeleteSchema, @@ -95,3 +100,12 @@ def _endpoint_path(self) -> str: self.synonym_id, ], ) + + def _maybe_warn_deprecation(self) -> None: + global _synonym_deprecation_warned + if not _synonym_deprecation_warned: + logger.warning( + "The synonyms API (collections/{collection}/synonyms) is deprecated and will be " + "removed in a future release. Use synonym sets (synonym_sets) instead." + ) + _synonym_deprecation_warned = True diff --git a/src/typesense/synonym_set.py b/src/typesense/synonym_set.py new file mode 100644 index 0000000..c6c6b3b --- /dev/null +++ b/src/typesense/synonym_set.py @@ -0,0 +1,43 @@ +"""Client for single Synonym Set operations.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from typesense.api_call import ApiCall +from typesense.types.synonym_set import ( + SynonymSetDeleteSchema, + SynonymSetRetrieveSchema, +) + + +class SynonymSet: + def __init__(self, api_call: ApiCall, name: str) -> None: + self.api_call = api_call + self.name = name + + @property + def _endpoint_path(self) -> str: + from typesense.synonym_sets import SynonymSets + + return "/".join([SynonymSets.resource_path, self.name]) + + def retrieve(self) -> SynonymSetRetrieveSchema: + response: SynonymSetRetrieveSchema = self.api_call.get( + self._endpoint_path, + as_json=True, + entity_type=SynonymSetRetrieveSchema, + ) + return response + + def delete(self) -> SynonymSetDeleteSchema: + response: SynonymSetDeleteSchema = self.api_call.delete( + self._endpoint_path, + entity_type=SynonymSetDeleteSchema, + ) + return response + + diff --git a/src/typesense/synonym_sets.py b/src/typesense/synonym_sets.py new file mode 100644 index 0000000..a1a38e5 --- /dev/null +++ b/src/typesense/synonym_sets.py @@ -0,0 +1,50 @@ +"""Client for Synonym Sets collection operations.""" + +import sys + +if sys.version_info >= (3, 11): 
+ import typing +else: + import typing_extensions as typing + +from typesense.api_call import ApiCall +from typesense.types.synonym_set import ( + SynonymSetCreateSchema, + SynonymSetDeleteSchema, + SynonymSetRetrieveSchema, + SynonymSetSchema, +) + + +class SynonymSets: + resource_path: typing.Final[str] = "/synonym_sets" + + def __init__(self, api_call: ApiCall) -> None: + self.api_call = api_call + + def retrieve(self) -> typing.List[SynonymSetSchema]: + response: typing.List[SynonymSetSchema] = self.api_call.get( + SynonymSets.resource_path, + as_json=True, + entity_type=typing.List[SynonymSetSchema], + ) + return response + + def __getitem__(self, synonym_set_name: str) -> "SynonymSet": + from typesense.synonym_set import SynonymSet as PerSet + + return PerSet(self.api_call, synonym_set_name) + + def upsert( + self, + synonym_set_name: str, + payload: SynonymSetCreateSchema, + ) -> SynonymSetSchema: + response: SynonymSetSchema = self.api_call.put( + "/".join([SynonymSets.resource_path, synonym_set_name]), + body=payload, + entity_type=SynonymSetSchema, + ) + return response + + diff --git a/src/typesense/synonyms.py b/src/typesense/synonyms.py index abd6211..c1bd6b7 100644 --- a/src/typesense/synonyms.py +++ b/src/typesense/synonyms.py @@ -34,6 +34,9 @@ SynonymSchema, SynonymsRetrieveSchema, ) +from typesense.logger import logger + +_synonyms_deprecation_warned = False if sys.version_info >= (3, 11): import typing @@ -98,6 +101,7 @@ def upsert(self, synonym_id: str, schema: SynonymCreateSchema) -> SynonymSchema: Returns: SynonymSchema: The created or updated synonym. """ + self._maybe_warn_deprecation() response = self.api_call.put( self._endpoint_path(synonym_id), body=schema, @@ -112,6 +116,7 @@ def retrieve(self) -> SynonymsRetrieveSchema: Returns: SynonymsRetrieveSchema: The schema containing all synonyms. 
""" + self._maybe_warn_deprecation() response = self.api_call.get( self._endpoint_path(), entity_type=SynonymsRetrieveSchema, @@ -139,3 +144,12 @@ def _endpoint_path(self, synonym_id: typing.Union[str, None] = None) -> str: synonym_id, ], ) + + def _maybe_warn_deprecation(self) -> None: + global _synonyms_deprecation_warned + if not _synonyms_deprecation_warned: + logger.warning( + "The synonyms API (collections/{collection}/synonyms) is deprecated and will be " + "removed in a future release. Use synonym sets (synonym_sets) instead." + ) + _synonyms_deprecation_warned = True diff --git a/src/typesense/types/synonym_set.py b/src/typesense/types/synonym_set.py new file mode 100644 index 0000000..c786d6b --- /dev/null +++ b/src/typesense/types/synonym_set.py @@ -0,0 +1,72 @@ +"""Synonym Set types for Typesense Python Client.""" + +import sys + +from typesense.types.collection import Locales + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class SynonymItemSchema(typing.TypedDict): + """ + Schema representing an individual synonym item inside a synonym set. + + Attributes: + id (str): Unique identifier for the synonym item. + synonyms (list[str]): The synonyms array. + root (str, optional): For 1-way synonyms, indicates the root word that words in + the synonyms parameter map to. + locale (Locales, optional): Locale for the synonym. + symbols_to_index (list[str], optional): Symbols to index as-is in synonyms. + """ + + id: str + synonyms: typing.List[str] + root: typing.NotRequired[str] + locale: typing.NotRequired[Locales] + symbols_to_index: typing.NotRequired[typing.List[str]] + + +class SynonymSetCreateSchema(typing.TypedDict): + """ + Schema for creating or updating a synonym set. + + Attributes: + items (list[SynonymItemSchema]): Array of synonym items. + """ + + items: typing.List[SynonymItemSchema] + + +class SynonymSetSchema(SynonymSetCreateSchema): + """ + Schema representing a synonym set. 
+ + Attributes: + name (str): Name of the synonym set. + """ + + name: str + + +class SynonymSetsRetrieveSchema(typing.List[SynonymSetSchema]): + """Deprecated alias for list of synonym sets; use List[SynonymSetSchema] directly.""" + + +class SynonymSetRetrieveSchema(SynonymSetCreateSchema): + """Response schema for retrieving a single synonym set by name.""" + + +class SynonymSetDeleteSchema(typing.TypedDict): + """Response schema for deleting a synonym set. + + Attributes: + name (str): Name of the deleted synonym set. + """ + + name: str + + diff --git a/tests/analytics_test.py b/tests/analytics_test.py index 5d9e56d..a7e2276 100644 --- a/tests/analytics_test.py +++ b/tests/analytics_test.py @@ -7,7 +7,7 @@ from typesense.api_call import ApiCall -@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") +@pytest.mark.skipif(not is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") def test_init(fake_api_call: ApiCall) -> None: """Test that the AnalyticsV1 object is initialized correctly.""" analytics = Analytics(fake_api_call) diff --git a/tests/fixtures/synonym_set_fixtures.py b/tests/fixtures/synonym_set_fixtures.py new file mode 100644 index 0000000..c4c4341 --- /dev/null +++ b/tests/fixtures/synonym_set_fixtures.py @@ -0,0 +1,73 @@ +"""Fixtures for the synonym set tests.""" + +import pytest +import requests + +from typesense.api_call import ApiCall +from typesense.synonym_set import SynonymSet +from typesense.synonym_sets import SynonymSets + + +@pytest.fixture(scope="function", name="create_synonym_set") +def create_synonym_set_fixture() -> None: + """Create a synonym set in the Typesense server.""" + url = "http://localhost:8108/synonym_sets/test-set" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + data = 
{ + "items": [ + { + "id": "company_synonym", + "synonyms": ["companies", "corporations", "firms"], + } + ] + } + + resp = requests.put(url, headers=headers, json=data, timeout=3) + resp.raise_for_status() + + +@pytest.fixture(scope="function", name="delete_all_synonym_sets") +def clear_typesense_synonym_sets() -> None: + """Remove all synonym sets from the Typesense server.""" + url = "http://localhost:8108/synonym_sets" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + # Get the list of synonym sets + response = requests.get(url, headers=headers, timeout=3) + response.raise_for_status() + data = response.json() + + # Delete each synonym set + for synset in data: + name = synset.get("name") + if not name: + continue + delete_url = f"{url}/{name}" + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + delete_response.raise_for_status() + + +@pytest.fixture(scope="function", name="actual_synonym_sets") +def actual_synonym_sets_fixture(actual_api_call: ApiCall) -> SynonymSets: + """Return a SynonymSets object using a real API.""" + return SynonymSets(actual_api_call) + + +@pytest.fixture(scope="function", name="actual_synonym_set") +def actual_synonym_set_fixture(actual_api_call: ApiCall) -> SynonymSet: + """Return a SynonymSet object using a real API.""" + return SynonymSet(actual_api_call, "test-set") + + +@pytest.fixture(scope="function", name="fake_synonym_sets") +def fake_synonym_sets_fixture(fake_api_call: ApiCall) -> SynonymSets: + """Return a SynonymSets object with test values.""" + return SynonymSets(fake_api_call) + + +@pytest.fixture(scope="function", name="fake_synonym_set") +def fake_synonym_set_fixture(fake_api_call: ApiCall) -> SynonymSet: + """Return a SynonymSet object with test values.""" + return SynonymSet(fake_api_call, "test-set") + + diff --git a/tests/import_test.py b/tests/import_test.py index b33bb39..9aec70e 100644 --- a/tests/import_test.py +++ b/tests/import_test.py @@ -20,6 
+20,7 @@ "operations", "override", "stopword", + "synonym_set", "synonym", ] @@ -41,6 +42,8 @@ "overrides", "operations", "synonyms", + "synonym_set", + "synonym_sets", "preprocess", "stopwords", ] diff --git a/tests/synonym_set_test.py b/tests/synonym_set_test.py new file mode 100644 index 0000000..85ebb01 --- /dev/null +++ b/tests/synonym_set_test.py @@ -0,0 +1,127 @@ +"""Tests for the SynonymSet class.""" + +from __future__ import annotations + +import pytest +import requests_mock + +from tests.utils.object_assertions import assert_match_object, assert_object_lists_match +from tests.utils.version import is_v30_or_above +from typesense.api_call import ApiCall +from typesense.client import Client +from typesense.synonym_set import SynonymSet +from typesense.types.synonym_set import SynonymSetDeleteSchema, SynonymSetRetrieveSchema + + +pytestmark = pytest.mark.skipif( + not is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": [ + {"host": "localhost", "port": 8108, "protocol": "http"} + ], + } + ) + ), + reason="Run synonym set tests only on v30+", +) + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the SynonymSet object is initialized correctly.""" + synset = SynonymSet(fake_api_call, "test-set") + + assert synset.name == "test-set" + assert_match_object(synset.api_call, fake_api_call) + assert_object_lists_match( + synset.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + synset.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + assert synset._endpoint_path == "/synonym_sets/test-set" # noqa: WPS437 + + +def test_retrieve(fake_synonym_set: SynonymSet) -> None: + """Test that the SynonymSet object can retrieve a synonym set.""" + json_response: SynonymSetRetrieveSchema = { + "items": [ + { + "id": "company_synonym", + "synonyms": ["companies", "corporations", "firms"], + } + ] + } + + with requests_mock.Mocker() as mock: + mock.get( + "/synonym_sets/test-set", + 
json=json_response, + ) + + response = fake_synonym_set.retrieve() + + assert len(mock.request_history) == 1 + assert mock.request_history[0].method == "GET" + assert ( + mock.request_history[0].url + == "http://nearest:8108/synonym_sets/test-set" + ) + assert response == json_response + + +def test_delete(fake_synonym_set: SynonymSet) -> None: + """Test that the SynonymSet object can delete a synonym set.""" + json_response: SynonymSetDeleteSchema = { + "name": "test-set", + } + with requests_mock.Mocker() as mock: + mock.delete( + "/synonym_sets/test-set", + json=json_response, + ) + + response = fake_synonym_set.delete() + + assert len(mock.request_history) == 1 + assert mock.request_history[0].method == "DELETE" + assert ( + mock.request_history[0].url + == "http://nearest:8108/synonym_sets/test-set" + ) + assert response == json_response + + +def test_actual_retrieve( + actual_synonym_sets: "SynonymSets", # type: ignore[name-defined] + delete_all_synonym_sets: None, + create_synonym_set: None, +) -> None: + """Test that the SynonymSet object can retrieve a synonym set from Typesense Server.""" + response = actual_synonym_sets["test-set"].retrieve() + + assert response == { + "name": "test-set", + "items": [ + { + "id": "company_synonym", + "root": "", + "synonyms": ["companies", "corporations", "firms"], + } + ] + } + + +def test_actual_delete( + actual_synonym_sets: "SynonymSets", # type: ignore[name-defined] + create_synonym_set: None, +) -> None: + """Test that the SynonymSet object can delete a synonym set from Typesense Server.""" + response = actual_synonym_sets["test-set"].delete() + + assert response == {"name": "test-set"} + + diff --git a/tests/synonym_sets_test.py b/tests/synonym_sets_test.py new file mode 100644 index 0000000..24cea59 --- /dev/null +++ b/tests/synonym_sets_test.py @@ -0,0 +1,163 @@ +"""Tests for the SynonymSets class.""" + +from __future__ import 
annotations + +import pytest +import requests_mock + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, + assert_to_contain_object, +) +from tests.utils.version import is_v30_or_above +from typesense.api_call import ApiCall +from typesense.client import Client +from typesense.synonym_sets import SynonymSets +from typesense.types.synonym_set import ( + SynonymSetCreateSchema, + SynonymSetSchema, +) + + +pytestmark = pytest.mark.skipif( + not is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": [ + {"host": "localhost", "port": 8108, "protocol": "http"} + ], + } + ) + ), + reason="Run synonym sets tests only on v30+", +) + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the SynonymSets object is initialized correctly.""" + synsets = SynonymSets(fake_api_call) + + assert_match_object(synsets.api_call, fake_api_call) + assert_object_lists_match( + synsets.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + assert_match_object( + synsets.api_call.config.nearest_node, + fake_api_call.config.nearest_node, + ) + + +def test_retrieve(fake_synonym_sets: SynonymSets) -> None: + """Test that the SynonymSets object can retrieve synonym sets.""" + json_response = [ + { + "name": "test-set", + "items": [ + { + "id": "company_synonym", + "root": "", + "synonyms": ["companies", "corporations", "firms"], + } + ], + } + ] + + with requests_mock.Mocker() as mock: + mock.get( + "http://nearest:8108/synonym_sets", + json=json_response, + ) + + response = fake_synonym_sets.retrieve() + + assert isinstance(response, list) + assert len(response) == 1 + assert response == json_response + + +def test_create(fake_synonym_sets: SynonymSets) -> None: + """Test that the SynonymSets object can create a synonym set.""" + json_response: SynonymSetSchema = { + "name": "test-set", + "items": [ + { + "id": "company_synonym", + "synonyms": ["companies", "corporations", 
"firms"], + } + ], + } + + with requests_mock.Mocker() as mock: + mock.put( + "http://nearest:8108/synonym_sets/test-set", + json=json_response, + ) + + payload: SynonymSetCreateSchema = { + "items": [ + { + "id": "company_synonym", + "synonyms": ["companies", "corporations", "firms"], + } + ] + } + fake_synonym_sets.upsert("test-set", payload) + + assert mock.call_count == 1 + assert mock.called is True + assert mock.last_request.method == "PUT" + assert ( + mock.last_request.url == "http://nearest:8108/synonym_sets/test-set" + ) + assert mock.last_request.json() == payload + + +def test_actual_create( + actual_synonym_sets: SynonymSets, + delete_all_synonym_sets: None, +) -> None: + """Test that the SynonymSets object can create a synonym set on Typesense Server.""" + response = actual_synonym_sets.upsert( + "test-set", + { + "items": [ + { + "id": "company_synonym", + "synonyms": ["companies", "corporations", "firms"], + } + ] + }, + ) + + assert response == { + "name": "test-set", + "items": [ + { + "id": "company_synonym", + "root": "", + "synonyms": ["companies", "corporations", "firms"], + } + ], + } + + +def test_actual_retrieve( + actual_synonym_sets: SynonymSets, + delete_all_synonym_sets: None, + create_synonym_set: None, +) -> None: + """Test that the SynonymSets object can retrieve a synonym set from Typesense Server.""" + response = actual_synonym_sets.retrieve() + + assert isinstance(response, list) + assert_to_contain_object( + response[0], + { + "name": "test-set", + }, + ) + + From afd5d92e0af1f55d6f431058ac451b561061f34b Mon Sep 17 00:00:00 2001 From: Harisaran G Date: Tue, 23 Sep 2025 11:02:21 +0530 Subject: [PATCH 259/288] add: curation_sets --- src/typesense/curation_set.py | 97 +++++++++++++++++++ src/typesense/curation_sets.py | 53 ++++++++++ src/typesense/synonym_set.py | 51 ++++++++++ src/typesense/types/curation_set.py | 99 +++++++++++++++++++ 
src/typesense/types/synonym_set.py | 10 +- tests/analytics_rule_v1_test.py | 18 +++- tests/analytics_rules_test.py | 4 +- tests/analytics_rules_v1_test.py | 21 ++-- tests/collection_test.py | 9 +- tests/collections_test.py | 6 +- tests/curation_set_test.py | 123 ++++++++++++++++++++++++ tests/curation_sets_test.py | 112 +++++++++++++++++++++ tests/fixtures/analytics_fixtures.py | 2 +- tests/fixtures/curation_set_fixtures.py | 73 ++++++++++++++ tests/override_test.py | 14 +++ tests/overrides_test.py | 14 ++- tests/synonym_set_items_test.py | 85 ++++++++++++++++ 17 files changed, 768 insertions(+), 23 deletions(-) create mode 100644 src/typesense/curation_set.py create mode 100644 src/typesense/curation_sets.py create mode 100644 src/typesense/types/curation_set.py create mode 100644 tests/curation_set_test.py create mode 100644 tests/curation_sets_test.py create mode 100644 tests/fixtures/curation_set_fixtures.py create mode 100644 tests/synonym_set_items_test.py diff --git a/src/typesense/curation_set.py b/src/typesense/curation_set.py new file mode 100644 index 0000000..f0db7e4 --- /dev/null +++ b/src/typesense/curation_set.py @@ -0,0 +1,97 @@ +"""Client for single Curation Set operations, including items APIs.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from typesense.api_call import ApiCall +from typesense.types.curation_set import ( + CurationSetSchema, + CurationSetDeleteSchema, + CurationSetUpsertSchema, + CurationSetListItemResponseSchema, + CurationItemSchema, + CurationItemDeleteSchema, +) + + +class CurationSet: + def __init__(self, api_call: ApiCall, name: str) -> None: + self.api_call = api_call + self.name = name + + @property + def _endpoint_path(self) -> str: + from typesense.curation_sets import CurationSets + + return "/".join([CurationSets.resource_path, self.name]) + + def retrieve(self) -> CurationSetSchema: + response: CurationSetSchema = self.api_call.get( + 
self._endpoint_path, + as_json=True, + entity_type=CurationSetSchema, + ) + return response + + def delete(self) -> CurationSetDeleteSchema: + response: CurationSetDeleteSchema = self.api_call.delete( + self._endpoint_path, + entity_type=CurationSetDeleteSchema, + ) + return response + + # Items sub-resource + @property + def _items_path(self) -> str: + return "/".join([self._endpoint_path, "items"]) # /curation_sets/{name}/items + + def list_items( + self, + *, + limit: typing.Union[int, None] = None, + offset: typing.Union[int, None] = None, + ) -> CurationSetListItemResponseSchema: + params: typing.Dict[str, typing.Union[int, None]] = { + "limit": limit, + "offset": offset, + } + # Filter out None values to avoid sending them + clean_params: typing.Dict[str, int] = { + k: v for k, v in params.items() if v is not None # type: ignore[dict-item] + } + response: CurationSetListItemResponseSchema = self.api_call.get( + self._items_path, + as_json=True, + entity_type=CurationSetListItemResponseSchema, + params=clean_params or None, + ) + return response + + def get_item(self, item_id: str) -> CurationItemSchema: + response: CurationItemSchema = self.api_call.get( + "/".join([self._items_path, item_id]), + as_json=True, + entity_type=CurationItemSchema, + ) + return response + + def upsert_item(self, item_id: str, item: CurationItemSchema) -> CurationItemSchema: + response: CurationItemSchema = self.api_call.put( + "/".join([self._items_path, item_id]), + body=item, + entity_type=CurationItemSchema, + ) + return response + + def delete_item(self, item_id: str) -> CurationItemDeleteSchema: + response: CurationItemDeleteSchema = self.api_call.delete( + "/".join([self._items_path, item_id]), + entity_type=CurationItemDeleteSchema, + ) + return response + + diff --git a/src/typesense/curation_sets.py b/src/typesense/curation_sets.py new file mode 100644 index 0000000..d257f42 --- /dev/null +++ b/src/typesense/curation_sets.py @@ -0,0 +1,53 @@ +"""Client for Curation Sets 
collection operations.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + +from typesense.api_call import ApiCall +from typesense.types.curation_set import ( + CurationSetSchema, + CurationSetUpsertSchema, + CurationSetsListResponseSchema, + CurationSetListItemResponseSchema, + CurationItemDeleteSchema, + CurationSetDeleteSchema, + CurationItemSchema, +) + + +class CurationSets: + resource_path: typing.Final[str] = "/curation_sets" + + def __init__(self, api_call: ApiCall) -> None: + self.api_call = api_call + + def retrieve(self) -> CurationSetsListResponseSchema: + response: CurationSetsListResponseSchema = self.api_call.get( + CurationSets.resource_path, + as_json=True, + entity_type=CurationSetsListResponseSchema, + ) + return response + + def __getitem__(self, curation_set_name: str) -> "CurationSet": + from typesense.curation_set import CurationSet as PerSet + + return PerSet(self.api_call, curation_set_name) + + def upsert( + self, + curation_set_name: str, + payload: CurationSetUpsertSchema, + ) -> CurationSetSchema: + response: CurationSetSchema = self.api_call.put( + "/".join([CurationSets.resource_path, curation_set_name]), + body=payload, + entity_type=CurationSetSchema, + ) + return response + + diff --git a/src/typesense/synonym_set.py b/src/typesense/synonym_set.py index c6c6b3b..daa9c7d 100644 --- a/src/typesense/synonym_set.py +++ b/src/typesense/synonym_set.py @@ -11,6 +11,8 @@ from typesense.types.synonym_set import ( SynonymSetDeleteSchema, SynonymSetRetrieveSchema, + SynonymItemSchema, + SynonymItemDeleteSchema, ) @@ -39,5 +41,54 @@ def delete(self) -> SynonymSetDeleteSchema: entity_type=SynonymSetDeleteSchema, ) return response + + @property + def _items_path(self) -> str: + return "/".join([self._endpoint_path, "items"]) # /synonym_sets/{name}/items + + def list_items( + self, + *, + limit: typing.Union[int, None] = None, + offset: typing.Union[int, None] = None, + ) -> 
typing.List[SynonymItemSchema]: + params: typing.Dict[str, typing.Union[int, None]] = { + "limit": limit, + "offset": offset, + } + clean_params: typing.Dict[str, int] = { + k: v for k, v in params.items() if v is not None # type: ignore[dict-item] + } + response: typing.List[SynonymItemSchema] = self.api_call.get( + self._items_path, + as_json=True, + entity_type=typing.List[SynonymItemSchema], + params=clean_params or None, + ) + return response + + def get_item(self, item_id: str) -> SynonymItemSchema: + response: SynonymItemSchema = self.api_call.get( + "/".join([self._items_path, item_id]), + as_json=True, + entity_type=SynonymItemSchema, + ) + return response + + def upsert_item(self, item_id: str, item: SynonymItemSchema) -> SynonymItemSchema: + response: SynonymItemSchema = self.api_call.put( + "/".join([self._items_path, item_id]), + body=item, + entity_type=SynonymItemSchema, + ) + return response + + def delete_item(self, item_id: str) -> typing.Dict[str, str]: + # API returns {"id": "..."} for delete; openapi defines SynonymItemDeleteResponse with name but for items it's id + response: SynonymItemDeleteSchema = self.api_call.delete( + "/".join([self._items_path, item_id]), + entity_type=typing.Dict[str, str], + ) + return response diff --git a/src/typesense/types/curation_set.py b/src/typesense/types/curation_set.py new file mode 100644 index 0000000..3a8c617 --- /dev/null +++ b/src/typesense/types/curation_set.py @@ -0,0 +1,99 @@ +"""Curation Set types for Typesense Python Client.""" + +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing + + +class CurationIncludeSchema(typing.TypedDict): + """ + Schema representing an included document for a curation rule. + """ + + id: str + position: int + + +class CurationExcludeSchema(typing.TypedDict): + """ + Schema representing an excluded document for a curation rule. 
+ """ + + id: str + + +class CurationRuleSchema(typing.TypedDict, total=False): + """ + Schema representing rule conditions for a curation item. + """ + + query: str + match: typing.Literal["exact", "contains"] + filter_by: str + tags: typing.List[str] + + +class CurationItemSchema(typing.TypedDict, total=False): + """ + Schema for a single curation item (aka CurationObject in the API). + """ + + id: str + rule: CurationRuleSchema + includes: typing.List[CurationIncludeSchema] + excludes: typing.List[CurationExcludeSchema] + filter_by: str + sort_by: str + replace_query: str + remove_matched_tokens: bool + filter_curated_hits: bool + stop_processing: bool + metadata: typing.Dict[str, typing.Any] + + +class CurationSetUpsertSchema(typing.TypedDict): + """ + Payload schema to create or replace a curation set. + """ + + items: typing.List[CurationItemSchema] + + +class CurationSetSchema(CurationSetUpsertSchema): + """ + Response schema for a curation set. + """ + + name: str + + +class CurationSetsListEntrySchema(typing.TypedDict): + """A single entry in the curation sets list response.""" + + name: str + items: typing.List[CurationItemSchema] + + +class CurationSetsListResponseSchema(typing.List[CurationSetsListEntrySchema]): + """List response for all curation sets.""" + + +class CurationSetListItemResponseSchema(typing.List[CurationItemSchema]): + """List response for items under a specific curation set.""" + + +class CurationItemDeleteSchema(typing.TypedDict): + """Response schema for deleting a curation item.""" + + id: str + + +class CurationSetDeleteSchema(typing.TypedDict): + """Response schema for deleting a curation set.""" + + name: str + + diff --git a/src/typesense/types/synonym_set.py b/src/typesense/types/synonym_set.py index c786d6b..9d0dfe1 100644 --- a/src/typesense/types/synonym_set.py +++ b/src/typesense/types/synonym_set.py @@ -29,6 +29,12 @@ class SynonymItemSchema(typing.TypedDict): locale: typing.NotRequired[Locales] symbols_to_index: 
typing.NotRequired[typing.List[str]] +class SynonymItemDeleteSchema(typing.TypedDict): + """ + Schema for deleting a synonym item. + """ + + id: str class SynonymSetCreateSchema(typing.TypedDict): """ @@ -67,6 +73,4 @@ class SynonymSetDeleteSchema(typing.TypedDict): name (str): Name of the deleted synonym set. """ - name: str - - + name: str \ No newline at end of file diff --git a/tests/analytics_rule_v1_test.py b/tests/analytics_rule_v1_test.py index 8cc970b..4e3534c 100644 --- a/tests/analytics_rule_v1_test.py +++ b/tests/analytics_rule_v1_test.py @@ -12,8 +12,16 @@ from typesense.api_call import ApiCall from typesense.types.analytics_rule_v1 import RuleDeleteSchema, RuleSchemaForQueries +pytestmark = pytest.mark.skipif( + is_v30_or_above( + Client({ + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + }) + ), + reason="Skip AnalyticsV1 tests on v30+" +) -@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") def test_init(fake_api_call: ApiCall) -> None: """Test that the AnalyticsRuleV1 object is initialized correctly.""" analytics_rule = AnalyticsRuleV1(fake_api_call, "company_analytics_rule") @@ -34,7 +42,7 @@ def test_init(fake_api_call: ApiCall) -> None: ) -@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") + def test_retrieve(fake_analytics_rule: AnalyticsRuleV1) -> None: """Test that the AnalyticsRuleV1 object can retrieve an analytics_rule.""" json_response: RuleSchemaForQueries = { @@ -65,7 +73,7 @@ def test_retrieve(fake_analytics_rule: AnalyticsRuleV1) -> None: assert response == json_response -@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") + def 
test_delete(fake_analytics_rule: AnalyticsRuleV1) -> None: """Test that the AnalyticsRuleV1 object can delete an analytics_rule.""" json_response: RuleDeleteSchema = { @@ -88,7 +96,7 @@ def test_delete(fake_analytics_rule: AnalyticsRuleV1) -> None: assert response == json_response -@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") + def test_actual_retrieve( actual_analytics_rules: AnalyticsRulesV1, delete_all: None, @@ -111,7 +119,7 @@ def test_actual_retrieve( assert response == expected -@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") + def test_actual_delete( actual_analytics_rules: AnalyticsRulesV1, delete_all: None, diff --git a/tests/analytics_rules_test.py b/tests/analytics_rules_test.py index ef67bb6..81fce0b 100644 --- a/tests/analytics_rules_test.py +++ b/tests/analytics_rules_test.py @@ -40,7 +40,7 @@ def test_rules_create(fake_api_call) -> None: "name": "company_analytics_rule", "type": "popular_queries", "collection": "companies", - "event_type": "query", + "event_type": "search", "params": {"destination_collection": "companies_queries", "limit": 1000}, } with requests_mock.Mocker() as mock: @@ -95,7 +95,7 @@ def test_actual_create( "name": "company_analytics_rule", "type": "nohits_queries", "collection": "companies", - "event_type": "query", + "event_type": "search", "params": {"destination_collection": "companies_queries", "limit": 1000}, } resp = actual_analytics_rules.create(rule=body) diff --git a/tests/analytics_rules_v1_test.py b/tests/analytics_rules_v1_test.py index 674ac34..6ea2d91 100644 --- a/tests/analytics_rules_v1_test.py +++ b/tests/analytics_rules_v1_test.py @@ -15,7 +15,16 @@ ) -@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, 
"protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") +pytestmark = pytest.mark.skipif( + is_v30_or_above( + Client({ + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + }) + ), + reason="Skip AnalyticsV1 tests on v30+" +) + def test_init(fake_api_call: ApiCall) -> None: """Test that the AnalyticsRulesV1 object is initialized correctly.""" analytics_rules = AnalyticsRulesV1(fake_api_call) @@ -33,7 +42,6 @@ def test_init(fake_api_call: ApiCall) -> None: assert not analytics_rules.rules -@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") def test_get_missing_analytics_rule(fake_analytics_rules: AnalyticsRulesV1) -> None: """Test that the AnalyticsRulesV1 object can get a missing analytics_rule.""" analytics_rule = fake_analytics_rules["company_analytics_rule"] @@ -54,7 +62,6 @@ def test_get_missing_analytics_rule(fake_analytics_rules: AnalyticsRulesV1) -> N ) -@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") def test_get_existing_analytics_rule(fake_analytics_rules: AnalyticsRulesV1) -> None: """Test that the AnalyticsRulesV1 object can get an existing analytics_rule.""" analytics_rule = fake_analytics_rules["company_analytics_rule"] @@ -65,7 +72,6 @@ def test_get_existing_analytics_rule(fake_analytics_rules: AnalyticsRulesV1) -> assert analytics_rule is fetched_analytics_rule -@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") def test_retrieve(fake_analytics_rules: AnalyticsRulesV1) -> None: """Test that the AnalyticsRulesV1 object can retrieve analytics_rules.""" json_response: RulesRetrieveSchema = { @@ -96,7 +102,6 @@ def 
test_retrieve(fake_analytics_rules: AnalyticsRulesV1) -> None: assert response == json_response -@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") def test_create(fake_analytics_rules: AnalyticsRulesV1) -> None: """Test that the AnalyticsRulesV1 object can create a analytics_rule.""" json_response: RuleCreateSchemaForQueries = { @@ -145,7 +150,7 @@ def test_create(fake_analytics_rules: AnalyticsRulesV1) -> None: } -@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") + def test_actual_create( actual_analytics_rules: AnalyticsRulesV1, delete_all: None, @@ -177,7 +182,7 @@ def test_actual_create( } -@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") + def test_actual_update( actual_analytics_rules: AnalyticsRulesV1, delete_all: None, @@ -208,7 +213,7 @@ def test_actual_update( } -@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") + def test_actual_retrieve( actual_analytics_rules: AnalyticsRulesV1, delete_all: None, diff --git a/tests/collection_test.py b/tests/collection_test.py index 49e6422..d01ae2f 100644 --- a/tests/collection_test.py +++ b/tests/collection_test.py @@ -57,6 +57,8 @@ def test_retrieve(fake_collection: Collection) -> None: "num_documents": 0, "symbols_to_index": [], "token_separators": [], + "synonym_sets": [], + "curation_sets": [], } with requests_mock.mock() as mock: @@ -100,6 +102,8 @@ def test_update(fake_collection: Collection) -> None: "num_documents": 0, "symbols_to_index": [], "token_separators": [], + "synonym_sets": [], + "curation_sets": [], } with 
requests_mock.mock() as mock: @@ -158,6 +162,8 @@ def test_delete(fake_collection: Collection) -> None: "num_documents": 0, "symbols_to_index": [], "token_separators": [], + "synonym_sets": [], + "curation_sets": [], } with requests_mock.mock() as mock: @@ -218,7 +224,8 @@ def test_actual_retrieve( "num_documents": 0, "symbols_to_index": [], "token_separators": [], - "synonym_sets": [] + "synonym_sets": [], + "curation_sets": [], } response.pop("created_at") diff --git a/tests/collections_test.py b/tests/collections_test.py index a68b468..a52c44d 100644 --- a/tests/collections_test.py +++ b/tests/collections_test.py @@ -223,7 +223,8 @@ def test_actual_create(actual_collections: Collections, delete_all: None) -> Non "num_documents": 0, "symbols_to_index": [], "token_separators": [], - "synonym_sets": [] + "synonym_sets": [], + "curation_sets": [], } response = actual_collections.create( @@ -292,7 +293,8 @@ def test_actual_retrieve( "num_documents": 0, "symbols_to_index": [], "token_separators": [], - "synonym_sets": [] + "synonym_sets": [], + "curation_sets": [], }, ] diff --git a/tests/curation_set_test.py b/tests/curation_set_test.py new file mode 100644 index 0000000..d975b4c --- /dev/null +++ b/tests/curation_set_test.py @@ -0,0 +1,123 @@ +"""Tests for the CurationSet class including items APIs.""" + +from __future__ import annotations + +import pytest +import requests_mock + +from tests.utils.version import is_v30_or_above +from typesense.client import Client +from typesense.curation_set import CurationSet +from typesense.types.curation_set import ( + CurationItemDeleteSchema, + CurationItemSchema, + CurationSetDeleteSchema, + CurationSetListItemResponseSchema, + CurationSetSchema, +) + + +pytestmark = pytest.mark.skipif( + not is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": [ + {"host": "localhost", "port": 8108, "protocol": "http"} + ], + } + ) + ), + reason="Run curation set tests only on v30+", +) + + +def test_paths(fake_curation_set: 
CurationSet) -> None: + assert fake_curation_set._endpoint_path == "/curation_sets/products" # noqa: WPS437 + assert fake_curation_set._items_path == "/curation_sets/products/items" # noqa: WPS437 + + +def test_retrieve(fake_curation_set: CurationSet) -> None: + json_response: CurationSetSchema = { + "name": "products", + "items": [], + } + with requests_mock.Mocker() as mock: + mock.get( + "/curation_sets/products", + json=json_response, + ) + res = fake_curation_set.retrieve() + assert res == json_response + + +def test_delete(fake_curation_set: CurationSet) -> None: + json_response: CurationSetDeleteSchema = {"name": "products"} + with requests_mock.Mocker() as mock: + mock.delete( + "/curation_sets/products", + json=json_response, + ) + res = fake_curation_set.delete() + assert res == json_response + + +def test_list_items(fake_curation_set: CurationSet) -> None: + json_response: CurationSetListItemResponseSchema = [ + { + "id": "rule-1", + "rule": {"query": "shoe", "match": "contains"}, + "includes": [{"id": "123", "position": 1}], + } + ] + with requests_mock.Mocker() as mock: + mock.get( + "/curation_sets/products/items?limit=10&offset=0", + json=json_response, + ) + res = fake_curation_set.list_items(limit=10, offset=0) + assert res == json_response + + +def test_get_item(fake_curation_set: CurationSet) -> None: + json_response: CurationItemSchema = { + "id": "rule-1", + "rule": {"query": "shoe", "match": "contains"}, + "includes": [{"id": "123", "position": 1}], + } + with requests_mock.Mocker() as mock: + mock.get( + "/curation_sets/products/items/rule-1", + json=json_response, + ) + res = fake_curation_set.get_item("rule-1") + assert res == json_response + + +def test_upsert_item(fake_curation_set: CurationSet) -> None: + payload: CurationItemSchema = { + "id": "rule-1", + "rule": {"query": "shoe", "match": "contains"}, + "includes": [{"id": "123", "position": 1}], + } + json_response = payload + with requests_mock.Mocker() as mock: + mock.put( + 
"/curation_sets/products/items/rule-1", + json=json_response, + ) + res = fake_curation_set.upsert_item("rule-1", payload) + assert res == json_response + + +def test_delete_item(fake_curation_set: CurationSet) -> None: + json_response: CurationItemDeleteSchema = {"id": "rule-1"} + with requests_mock.Mocker() as mock: + mock.delete( + "/curation_sets/products/items/rule-1", + json=json_response, + ) + res = fake_curation_set.delete_item("rule-1") + assert res == json_response + + diff --git a/tests/curation_sets_test.py b/tests/curation_sets_test.py new file mode 100644 index 0000000..5f4a270 --- /dev/null +++ b/tests/curation_sets_test.py @@ -0,0 +1,112 @@ +"""Tests for the CurationSets class.""" + +from __future__ import annotations + +import pytest +import requests_mock + +from tests.utils.object_assertions import ( + assert_match_object, + assert_object_lists_match, +) +from tests.utils.version import is_v30_or_above +from typesense.api_call import ApiCall +from typesense.client import Client +from typesense.curation_sets import CurationSets +from typesense.types.curation_set import CurationSetSchema, CurationSetUpsertSchema + + +pytestmark = pytest.mark.skipif( + not is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": [ + {"host": "localhost", "port": 8108, "protocol": "http"} + ], + } + ) + ), + reason="Run curation sets tests only on v30+", +) + + +def test_init(fake_api_call: ApiCall) -> None: + """Test that the CurationSets object is initialized correctly.""" + cur_sets = CurationSets(fake_api_call) + + assert_match_object(cur_sets.api_call, fake_api_call) + assert_object_lists_match( + cur_sets.api_call.node_manager.nodes, + fake_api_call.node_manager.nodes, + ) + + +def test_retrieve(fake_curation_sets: CurationSets) -> None: + """Test that the CurationSets object can retrieve curation sets.""" + json_response = [ + { + "name": "products", + "items": [ + { + "id": "rule-1", + "rule": {"query": "shoe", "match": "contains"}, + "includes": [{"id": 
"123", "position": 1}], + } + ], + } + ] + + with requests_mock.Mocker() as mock: + mock.get( + "http://nearest:8108/curation_sets", + json=json_response, + ) + + response = fake_curation_sets.retrieve() + + assert isinstance(response, list) + assert len(response) == 1 + assert response == json_response + + +def test_upsert(fake_curation_sets: CurationSets) -> None: + """Test that the CurationSets object can upsert a curation set.""" + json_response: CurationSetSchema = { + "name": "products", + "items": [ + { + "id": "rule-1", + "rule": {"query": "shoe", "match": "contains"}, + "includes": [{"id": "123", "position": 1}], + } + ], + } + + with requests_mock.Mocker() as mock: + mock.put( + "http://nearest:8108/curation_sets/products", + json=json_response, + ) + + payload: CurationSetUpsertSchema = { + "items": [ + { + "id": "rule-1", + "rule": {"query": "shoe", "match": "contains"}, + "includes": [{"id": "123", "position": 1}], + } + ] + } + response = fake_curation_sets.upsert("products", payload) + + assert response == json_response + assert mock.call_count == 1 + assert mock.called is True + assert mock.last_request.method == "PUT" + assert ( + mock.last_request.url == "http://nearest:8108/curation_sets/products" + ) + assert mock.last_request.json() == payload + + diff --git a/tests/fixtures/analytics_fixtures.py b/tests/fixtures/analytics_fixtures.py index d0f7715..a95c8b5 100644 --- a/tests/fixtures/analytics_fixtures.py +++ b/tests/fixtures/analytics_fixtures.py @@ -38,7 +38,7 @@ def create_analytics_rule_fixture( "name": "company_analytics_rule", "type": "nohits_queries", "collection": "companies", - "event_type": "query", + "event_type": "search", "params": { "destination_collection": "companies_queries", "limit": 1000, diff --git a/tests/fixtures/curation_set_fixtures.py b/tests/fixtures/curation_set_fixtures.py new file mode 
100644 index 0000000..6ab184c --- /dev/null +++ b/tests/fixtures/curation_set_fixtures.py @@ -0,0 +1,73 @@ +"""Fixtures for the curation set tests.""" + +import pytest +import requests + +from typesense.api_call import ApiCall +from typesense.curation_set import CurationSet +from typesense.curation_sets import CurationSets + + +@pytest.fixture(scope="function", name="create_curation_set") +def create_curation_set_fixture() -> None: + """Create a curation set in the Typesense server.""" + url = "http://localhost:8108/curation_sets/products" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + data = { + "items": [ + { + "id": "rule-1", + "rule": {"query": "shoe", "match": "contains"}, + "includes": [{"id": "123", "position": 1}], + "excludes": [{"id": "999"}], + } + ] + } + + resp = requests.put(url, headers=headers, json=data, timeout=3) + resp.raise_for_status() + + +@pytest.fixture(scope="function", name="delete_all_curation_sets") +def clear_typesense_curation_sets() -> None: + """Remove all curation sets from the Typesense server.""" + url = "http://localhost:8108/curation_sets" + headers = {"X-TYPESENSE-API-KEY": "xyz"} + + response = requests.get(url, headers=headers, timeout=3) + response.raise_for_status() + data = response.json() + + for cur in data: + name = cur.get("name") + if not name: + continue + delete_url = f"{url}/{name}" + delete_response = requests.delete(delete_url, headers=headers, timeout=3) + delete_response.raise_for_status() + + +@pytest.fixture(scope="function", name="actual_curation_sets") +def actual_curation_sets_fixture(actual_api_call: ApiCall) -> CurationSets: + """Return a CurationSets object using a real API.""" + return CurationSets(actual_api_call) + + +@pytest.fixture(scope="function", name="actual_curation_set") +def actual_curation_set_fixture(actual_api_call: ApiCall) -> CurationSet: + """Return a CurationSet object using a real API.""" + return 
CurationSet(actual_api_call, "products") + + +@pytest.fixture(scope="function", name="fake_curation_sets") +def fake_curation_sets_fixture(fake_api_call: ApiCall) -> CurationSets: + """Return a CurationSets object with test values.""" + return CurationSets(fake_api_call) + + +@pytest.fixture(scope="function", name="fake_curation_set") +def fake_curation_set_fixture(fake_api_call: ApiCall) -> CurationSet: + """Return a CurationSet object with test values.""" + return CurationSet(fake_api_call, "products") + + diff --git a/tests/override_test.py b/tests/override_test.py index 25b05fd..0886bc5 100644 --- a/tests/override_test.py +++ b/tests/override_test.py @@ -2,6 +2,7 @@ from __future__ import annotations +import pytest import requests_mock from tests.utils.object_assertions import ( @@ -13,6 +14,19 @@ from typesense.collections import Collections from typesense.override import Override, OverrideDeleteSchema from typesense.types.override import OverrideSchema +from tests.utils.version import is_v30_or_above +from typesense.client import Client + + +pytestmark = pytest.mark.skipif( + is_v30_or_above( + Client({ + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + }) + ), + reason="Run override tests only on less than v30", +) def test_init(fake_api_call: ApiCall) -> None: diff --git a/tests/overrides_test.py b/tests/overrides_test.py index 872fe54..4593961 100644 --- a/tests/overrides_test.py +++ b/tests/overrides_test.py @@ -3,6 +3,7 @@ from __future__ import annotations import requests_mock +import pytest from tests.utils.object_assertions import ( assert_match_object, @@ -12,7 +13,18 @@ from typesense.api_call import ApiCall from typesense.collections import Collections from typesense.overrides import OverrideRetrieveSchema, Overrides, OverrideSchema - +from tests.utils.version import is_v30_or_above +from typesense.client import Client + +pytestmark = pytest.mark.skipif( + is_v30_or_above( + Client({ + "api_key": "xyz", + 
"nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + }) + ), + reason="Run override tests only on less than v30", +) def test_init(fake_api_call: ApiCall) -> None: """Test that the Overrides object is initialized correctly.""" diff --git a/tests/synonym_set_items_test.py b/tests/synonym_set_items_test.py new file mode 100644 index 0000000..0fb55d7 --- /dev/null +++ b/tests/synonym_set_items_test.py @@ -0,0 +1,85 @@ +"""Tests for SynonymSet item-level APIs.""" + +from __future__ import annotations + +import pytest +import requests_mock + +from tests.utils.version import is_v30_or_above +from typesense.client import Client +from typesense.synonym_set import SynonymSet +from typesense.types.synonym_set import ( + SynonymItemDeleteSchema, + SynonymItemSchema, +) + + +pytestmark = pytest.mark.skipif( + not is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": [ + {"host": "localhost", "port": 8108, "protocol": "http"} + ], + } + ) + ), + reason="Run synonym set items tests only on v30+", +) + + +def test_list_items(fake_synonym_set: SynonymSet) -> None: + json_response = [ + {"id": "nike", "synonyms": ["nike", "nikes"]}, + {"id": "adidas", "synonyms": ["adidas", "adi"]}, + ] + with requests_mock.Mocker() as mock: + mock.get( + "/synonym_sets/test-set/items?limit=10&offset=0", + json=json_response, + ) + res = fake_synonym_set.list_items(limit=10, offset=0) + assert res == json_response + + +def test_get_item(fake_synonym_set: SynonymSet) -> None: + json_response: SynonymItemSchema = { + "id": "nike", + "synonyms": ["nike", "nikes"], + } + with requests_mock.Mocker() as mock: + mock.get( + "/synonym_sets/test-set/items/nike", + json=json_response, + ) + res = fake_synonym_set.get_item("nike") + assert res == json_response + + +def test_upsert_item(fake_synonym_set: SynonymSet) -> None: + payload: SynonymItemSchema = { + "id": "nike", + "synonyms": ["nike", "nikes"], + } + json_response = payload + with requests_mock.Mocker() as mock: + mock.put( 
+ "/synonym_sets/test-set/items/nike", + json=json_response, + ) + res = fake_synonym_set.upsert_item("nike", payload) + assert res == json_response + + +def test_delete_item(fake_synonym_set: SynonymSet) -> None: + json_response: SynonymItemDeleteSchema = {"id": "nike"} + with requests_mock.Mocker() as mock: + mock.delete( + "/synonym_sets/test-set/items/nike", + json=json_response, + ) + res = fake_synonym_set.delete_item("nike") + assert res == json_response + + From 6c0a52e8ede67a15eb4f848b6bea82372d722654 Mon Sep 17 00:00:00 2001 From: Harisaran G Date: Tue, 23 Sep 2025 11:12:51 +0530 Subject: [PATCH 260/288] fix: types --- src/typesense/types/analytics.py | 26 ++++++++++++-------------- src/typesense/types/curation_set.py | 24 ++++++++++++------------ 2 files changed, 24 insertions(+), 26 deletions(-) diff --git a/src/typesense/types/analytics.py b/src/typesense/types/analytics.py index 540c8b4..5f5d133 100644 --- a/src/typesense/types/analytics.py +++ b/src/typesense/types/analytics.py @@ -12,7 +12,6 @@ class AnalyticsEvent(typing.TypedDict): """Schema for an analytics event to be created.""" name: str - event_type: str data: typing.Dict[str, typing.Any] @@ -24,13 +23,12 @@ class AnalyticsEventCreateResponse(typing.TypedDict): class _AnalyticsEventItem(typing.TypedDict, total=False): name: str - event_type: str collection: str - timestamp: int + timestamp: typing.NotRequired[int] user_id: str - doc_id: str - doc_ids: typing.List[str] - query: str + doc_id: typing.NotRequired[str] + doc_ids: typing.NotRequired[typing.List[str]] + query: typing.NotRequired[str] class AnalyticsEventsResponse(typing.TypedDict): @@ -54,13 +52,13 @@ class AnalyticsStatus(typing.TypedDict, total=False): # Rules class AnalyticsRuleParams(typing.TypedDict, total=False): - destination_collection: str - limit: int - capture_search_requests: bool - meta_fields: typing.List[str] - expand_query: bool - counter_field: str - weight: int + destination_collection: typing.NotRequired[str] + 
limit: typing.NotRequired[int] + capture_search_requests: typing.NotRequired[bool] + meta_fields: typing.NotRequired[typing.List[str]] + expand_query: typing.NotRequired[bool] + counter_field: typing.NotRequired[str] + weight: typing.NotRequired[int] class AnalyticsRuleCreate(typing.TypedDict): @@ -68,7 +66,7 @@ class AnalyticsRuleCreate(typing.TypedDict): type: str collection: str event_type: str - params: AnalyticsRuleParams + params: typing.NotRequired[AnalyticsRuleParams] rule_tag: typing.NotRequired[str] diff --git a/src/typesense/types/curation_set.py b/src/typesense/types/curation_set.py index 3a8c617..f3d3729 100644 --- a/src/typesense/types/curation_set.py +++ b/src/typesense/types/curation_set.py @@ -30,10 +30,10 @@ class CurationRuleSchema(typing.TypedDict, total=False): Schema representing rule conditions for a curation item. """ - query: str - match: typing.Literal["exact", "contains"] - filter_by: str - tags: typing.List[str] + query: typing.NotRequired[str] + match: typing.NotRequired[typing.Literal["exact", "contains"]] + filter_by: typing.NotRequired[str] + tags: typing.NotRequired[typing.List[str]] class CurationItemSchema(typing.TypedDict, total=False): @@ -43,14 +43,14 @@ class CurationItemSchema(typing.TypedDict, total=False): id: str rule: CurationRuleSchema - includes: typing.List[CurationIncludeSchema] - excludes: typing.List[CurationExcludeSchema] - filter_by: str - sort_by: str - replace_query: str - remove_matched_tokens: bool - filter_curated_hits: bool - stop_processing: bool + includes: typing.NotRequired[typing.List[CurationIncludeSchema]] + excludes: typing.NotRequired[typing.List[CurationExcludeSchema]] + filter_by: typing.NotRequired[str] + sort_by: typing.NotRequired[str] + replace_query: typing.NotRequired[str] + remove_matched_tokens: typing.NotRequired[bool] + filter_curated_hits: typing.NotRequired[bool] + stop_processing: typing.NotRequired[bool] metadata: typing.Dict[str, typing.Any] From 
ca6d662a968b794d957d528af69652cf2adc39dd Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 24 Sep 2025 09:22:38 +0300 Subject: [PATCH 261/288] fix(types): add `stem_dictionary` to collection types --- src/typesense/types/collection.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/typesense/types/collection.py b/src/typesense/types/collection.py index 2cb0d28..1ce839c 100644 --- a/src/typesense/types/collection.py +++ b/src/typesense/types/collection.py @@ -77,6 +77,7 @@ class CollectionFieldSchema(typing.Generic[_TType], typing.TypedDict, total=Fals optional: typing.NotRequired[bool] infix: typing.NotRequired[bool] stem: typing.NotRequired[bool] + stem_dictionary: typing.NotRequired[str] locale: typing.NotRequired[Locales] sort: typing.NotRequired[bool] store: typing.NotRequired[bool] From 50206a7775a699d6831dfed0358a5535c9914b44 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 29 Oct 2025 12:31:47 +0200 Subject: [PATCH 262/288] chore: lint --- tests/api_call_test.py | 1 - tests/nl_search_models_test.py | 5 ++--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/tests/api_call_test.py b/tests/api_call_test.py index 1d5fa11..e13c056 100644 --- a/tests/api_call_test.py +++ b/tests/api_call_test.py @@ -6,7 +6,6 @@ import sys import time -from isort import Config from pytest_mock import MockFixture if sys.version_info >= (3, 11): diff --git a/tests/nl_search_models_test.py b/tests/nl_search_models_test.py index 1558b39..daaa842 100644 --- a/tests/nl_search_models_test.py +++ b/tests/nl_search_models_test.py @@ -8,9 +8,9 @@ import pytest if sys.version_info >= (3, 11): - import typing + pass else: - import typing_extensions as typing + pass from tests.utils.object_assertions import ( assert_match_object, @@ -20,7 +20,6 @@ ) from typesense.api_call import ApiCall from typesense.nl_search_models import NLSearchModels -from typesense.types.nl_search_model import NLSearchModelSchema def test_init(fake_api_call: ApiCall) -> None: 
From 957e18ab1cc25fce37e9fe61f6c3fbfae46db153 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 29 Oct 2025 12:34:35 +0200 Subject: [PATCH 263/288] fix: import class for `SynonymSets` on test --- tests/synonym_set_test.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/synonym_set_test.py b/tests/synonym_set_test.py index 85ebb01..ee6650d 100644 --- a/tests/synonym_set_test.py +++ b/tests/synonym_set_test.py @@ -10,6 +10,7 @@ from typesense.api_call import ApiCall from typesense.client import Client from typesense.synonym_set import SynonymSet +from typesense.synonym_sets import SynonymSets from typesense.types.synonym_set import SynonymSetDeleteSchema, SynonymSetRetrieveSchema @@ -96,7 +97,7 @@ def test_delete(fake_synonym_set: SynonymSet) -> None: def test_actual_retrieve( - actual_synonym_sets: "SynonymSets", # type: ignore[name-defined] + actual_synonym_sets: SynonymSets, delete_all_synonym_sets: None, create_synonym_set: None, ) -> None: @@ -116,7 +117,7 @@ def test_actual_retrieve( def test_actual_delete( - actual_synonym_sets: "SynonymSets", # type: ignore[name-defined] + actual_synonym_sets: SynonymSets, create_synonym_set: None, ) -> None: """Test that the SynonymSet object can delete a synonym set from Typesense Server.""" From 6a15127c2a23ac45a0879ec8110287252ac1334e Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 29 Oct 2025 12:35:04 +0200 Subject: [PATCH 264/288] fix(test): add `truncate_len` to expected schemas in collection tests --- tests/collection_test.py | 7 +++---- tests/collections_test.py | 4 ++++ 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/tests/collection_test.py b/tests/collection_test.py index d01ae2f..56c4429 100644 --- a/tests/collection_test.py +++ b/tests/collection_test.py @@ -204,6 +204,7 @@ def test_actual_retrieve( "infix": False, "stem": False, "stem_dictionary": "", + "truncate_len": 100, "store": True, }, { @@ -217,6 +218,7 @@ def test_actual_retrieve( 
"infix": False, "stem": False, "stem_dictionary": "", + "truncate_len": 100, "store": True, }, ], @@ -245,10 +247,7 @@ def test_actual_update( expected: CollectionSchema = { "fields": [ - { - "name": "num_locations", - "type": "int32", - }, + {"name": "num_locations", "truncate_len": 100, "type": "int32"}, ], } diff --git a/tests/collections_test.py b/tests/collections_test.py index a52c44d..55142ae 100644 --- a/tests/collections_test.py +++ b/tests/collections_test.py @@ -203,6 +203,7 @@ def test_actual_create(actual_collections: Collections, delete_all: None) -> Non "infix": False, "stem": False, "stem_dictionary": "", + "truncate_len": 100, "store": True, }, { @@ -216,6 +217,7 @@ def test_actual_create(actual_collections: Collections, delete_all: None) -> Non "infix": False, "stem": False, "stem_dictionary": "", + "truncate_len": 100, "store": True, }, ], @@ -273,6 +275,7 @@ def test_actual_retrieve( "infix": False, "stem": False, "stem_dictionary": "", + "truncate_len": 100, "store": True, }, { @@ -286,6 +289,7 @@ def test_actual_retrieve( "infix": False, "stem": False, "stem_dictionary": "", + "truncate_len": 100, "store": True, }, ], From 427be744127687324317f8738d37749530210d91 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 29 Oct 2025 12:35:36 +0200 Subject: [PATCH 265/288] fix(test): check for versions not prefixed with `v` on skip util --- tests/utils/version.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/tests/utils/version.py b/tests/utils/version.py index ba3ca93..a7d375c 100644 --- a/tests/utils/version.py +++ b/tests/utils/version.py @@ -10,8 +10,13 @@ def is_v30_or_above(client: Client) -> bool: if version == "nightly": return True try: - numbered = str(version).split("v")[1] - return int(numbered) >= 30 + version_str = str(version) + if version_str.startswith("v"): + numbered = version_str.split("v", 1)[1] + else: + numbered = version_str + major_version = numbered.split(".", 1)[0] + return 
int(major_version) >= 30 except Exception: return False except Exception: From 9aeebc5f20090bf323f3ec3d48cd6d544809ad73 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 29 Oct 2025 13:13:28 +0200 Subject: [PATCH 266/288] chore: lint --- src/typesense/analytics.py | 12 +----------- src/typesense/analytics_rule.py | 9 --------- src/typesense/curation_set.py | 3 +-- src/typesense/curation_sets.py | 11 +++-------- src/typesense/synonym.py | 2 +- src/typesense/synonym_set.py | 4 +++- src/typesense/synonym_sets.py | 7 ++----- 7 files changed, 11 insertions(+), 37 deletions(-) diff --git a/src/typesense/analytics.py b/src/typesense/analytics.py index 3463748..c4a09e2 100644 --- a/src/typesense/analytics.py +++ b/src/typesense/analytics.py @@ -1,15 +1,8 @@ """Client for Typesense Analytics module.""" -import sys - -if sys.version_info >= (3, 11): - import typing -else: - import typing_extensions as typing - -from typesense.api_call import ApiCall from typesense.analytics_events import AnalyticsEvents from typesense.analytics_rules import AnalyticsRules +from typesense.api_call import ApiCall class Analytics: @@ -19,6 +12,3 @@ def __init__(self, api_call: ApiCall) -> None: self.api_call = api_call self.rules = AnalyticsRules(api_call) self.events = AnalyticsEvents(api_call) - - - diff --git a/src/typesense/analytics_rule.py b/src/typesense/analytics_rule.py index d9c21b2..fba11ce 100644 --- a/src/typesense/analytics_rule.py +++ b/src/typesense/analytics_rule.py @@ -1,12 +1,5 @@ """Per-rule client for Analytics rules operations.""" -import sys - -if sys.version_info >= (3, 11): - import typing -else: - import typing_extensions as typing - from typesense.api_call import ApiCall from typesense.types.analytics import AnalyticsRule @@ -36,5 +29,3 @@ def delete(self) -> AnalyticsRule: entity_type=AnalyticsRule, ) return response - - diff --git a/src/typesense/curation_set.py b/src/typesense/curation_set.py index f0db7e4..3828161 100644 --- 
a/src/typesense/curation_set.py +++ b/src/typesense/curation_set.py @@ -11,7 +11,6 @@ from typesense.types.curation_set import ( CurationSetSchema, CurationSetDeleteSchema, - CurationSetUpsertSchema, CurationSetListItemResponseSchema, CurationItemSchema, CurationItemDeleteSchema, @@ -61,7 +60,7 @@ def list_items( } # Filter out None values to avoid sending them clean_params: typing.Dict[str, int] = { - k: v for k, v in params.items() if v is not None # type: ignore[dict-item] + k: v for k, v in params.items() if v is not None } response: CurationSetListItemResponseSchema = self.api_call.get( self._items_path, diff --git a/src/typesense/curation_sets.py b/src/typesense/curation_sets.py index d257f42..b13303e 100644 --- a/src/typesense/curation_sets.py +++ b/src/typesense/curation_sets.py @@ -8,14 +8,11 @@ import typing_extensions as typing from typesense.api_call import ApiCall +from typesense.curation_set import CurationSet from typesense.types.curation_set import ( CurationSetSchema, - CurationSetUpsertSchema, CurationSetsListResponseSchema, - CurationSetListItemResponseSchema, - CurationItemDeleteSchema, - CurationSetDeleteSchema, - CurationItemSchema, + CurationSetUpsertSchema, ) @@ -33,7 +30,7 @@ def retrieve(self) -> CurationSetsListResponseSchema: ) return response - def __getitem__(self, curation_set_name: str) -> "CurationSet": + def __getitem__(self, curation_set_name: str) -> CurationSet: from typesense.curation_set import CurationSet as PerSet return PerSet(self.api_call, curation_set_name) @@ -49,5 +46,3 @@ def upsert( entity_type=CurationSetSchema, ) return response - - diff --git a/src/typesense/synonym.py b/src/typesense/synonym.py index 4d5b73b..53f9bd3 100644 --- a/src/typesense/synonym.py +++ b/src/typesense/synonym.py @@ -23,9 +23,9 @@ from typesense.api_call import ApiCall from typesense.logger import logger +from typesense.types.synonym import SynonymDeleteSchema, SynonymSchema _synonym_deprecation_warned = False -from typesense.types.synonym 
import SynonymDeleteSchema, SynonymSchema class Synonym: diff --git a/src/typesense/synonym_set.py b/src/typesense/synonym_set.py index daa9c7d..e00401c 100644 --- a/src/typesense/synonym_set.py +++ b/src/typesense/synonym_set.py @@ -57,7 +57,9 @@ def list_items( "offset": offset, } clean_params: typing.Dict[str, int] = { - k: v for k, v in params.items() if v is not None # type: ignore[dict-item] + k: v + for k, v in params.items() + if v is not None } response: typing.List[SynonymItemSchema] = self.api_call.get( self._items_path, diff --git a/src/typesense/synonym_sets.py b/src/typesense/synonym_sets.py index a1a38e5..543e77c 100644 --- a/src/typesense/synonym_sets.py +++ b/src/typesense/synonym_sets.py @@ -8,10 +8,9 @@ import typing_extensions as typing from typesense.api_call import ApiCall +from typesense.synonym_set import SynonymSet from typesense.types.synonym_set import ( SynonymSetCreateSchema, - SynonymSetDeleteSchema, - SynonymSetRetrieveSchema, SynonymSetSchema, ) @@ -30,7 +29,7 @@ def retrieve(self) -> typing.List[SynonymSetSchema]: ) return response - def __getitem__(self, synonym_set_name: str) -> "SynonymSet": + def __getitem__(self, synonym_set_name: str) -> SynonymSet: from typesense.synonym_set import SynonymSet as PerSet return PerSet(self.api_call, synonym_set_name) @@ -46,5 +45,3 @@ def upsert( entity_type=SynonymSetSchema, ) return response - - From dd3e2869623599344f4d1c9fbfe7d230976391d8 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 29 Oct 2025 13:13:44 +0200 Subject: [PATCH 267/288] fix(curation_set): add discriminated union types for curation sets --- src/typesense/types/curation_set.py | 51 +++++++++++++++++++++++------ 1 file changed, 41 insertions(+), 10 deletions(-) diff --git a/src/typesense/types/curation_set.py b/src/typesense/types/curation_set.py index f3d3729..a19ee0f 100644 --- a/src/typesense/types/curation_set.py +++ b/src/typesense/types/curation_set.py @@ -25,18 +25,47 @@ class 
CurationExcludeSchema(typing.TypedDict): id: str -class CurationRuleSchema(typing.TypedDict, total=False): +class CurationRuleTagsSchema(typing.TypedDict): """ - Schema representing rule conditions for a curation item. + Schema for a curation rule using tags. """ - query: typing.NotRequired[str] - match: typing.NotRequired[typing.Literal["exact", "contains"]] - filter_by: typing.NotRequired[str] - tags: typing.NotRequired[typing.List[str]] + tags: typing.List[str] + + +class CurationRuleQuerySchema(typing.TypedDict): + """ + Schema for a curation rule using query and match. + """ + + query: str + match: typing.Literal["exact", "contains"] -class CurationItemSchema(typing.TypedDict, total=False): +class CurationRuleFilterBySchema(typing.TypedDict): + """ + Schema for a curation rule using filter_by. + """ + + filter_by: str + + +CurationRuleSchema = typing.Union[ + CurationRuleTagsSchema, + CurationRuleQuerySchema, + CurationRuleFilterBySchema, +] +""" +Schema representing rule conditions for a curation item. + +A curation rule must be exactly one of: +- A tags-based rule: `{ tags: string[] }` +- A query-based rule: `{ query: string; match: "exact" | "contains" }` +- A filter_by-based rule: `{ filter_by: string }` +""" + + +class CurationItemSchema(typing.TypedDict): """ Schema for a single curation item (aka CurationObject in the API). 
""" @@ -51,7 +80,9 @@ class CurationItemSchema(typing.TypedDict, total=False): remove_matched_tokens: typing.NotRequired[bool] filter_curated_hits: typing.NotRequired[bool] stop_processing: typing.NotRequired[bool] - metadata: typing.Dict[str, typing.Any] + effective_from_ts: typing.NotRequired[int] + effective_to_ts: typing.NotRequired[int] + metadata: typing.NotRequired[typing.Dict[str, typing.Any]] class CurationSetUpsertSchema(typing.TypedDict): @@ -62,12 +93,12 @@ class CurationSetUpsertSchema(typing.TypedDict): items: typing.List[CurationItemSchema] -class CurationSetSchema(CurationSetUpsertSchema): +class CurationSetSchema(CurationSetUpsertSchema, total=False): """ Response schema for a curation set. """ - name: str + name: typing.NotRequired[str] class CurationSetsListEntrySchema(typing.TypedDict): From 59c850d655ee8269ba19bc17465426fc95615af2 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 29 Oct 2025 13:27:34 +0200 Subject: [PATCH 268/288] fix(analytics): rename analytics rule type to schema to avoid mypy issues --- src/typesense/analytics_rule.py | 6 +++--- src/typesense/analytics_rules.py | 37 +++++++++++++++++--------------- src/typesense/types/analytics.py | 5 ++--- 3 files changed, 25 insertions(+), 23 deletions(-) diff --git a/src/typesense/analytics_rule.py b/src/typesense/analytics_rule.py index fba11ce..86b516d 100644 --- a/src/typesense/analytics_rule.py +++ b/src/typesense/analytics_rule.py @@ -1,7 +1,7 @@ """Per-rule client for Analytics rules operations.""" from typesense.api_call import ApiCall -from typesense.types.analytics import AnalyticsRule +from typesense.types.analytics import AnalyticsRuleSchema class AnalyticsRule: @@ -15,7 +15,7 @@ def _endpoint_path(self) -> str: return "/".join([AnalyticsRules.resource_path, self.rule_name]) - def retrieve(self) -> AnalyticsRule: + def retrieve(self) -> AnalyticsRuleSchema: response: AnalyticsRule = self.api_call.get( self._endpoint_path, as_json=True, @@ -23,7 +23,7 @@ def 
retrieve(self) -> AnalyticsRule: ) return response - def delete(self) -> AnalyticsRule: + def delete(self) -> AnalyticsRuleSchema: response: AnalyticsRule = self.api_call.delete( self._endpoint_path, entity_type=AnalyticsRule, diff --git a/src/typesense/analytics_rules.py b/src/typesense/analytics_rules.py index 2097e0b..a95dc60 100644 --- a/src/typesense/analytics_rules.py +++ b/src/typesense/analytics_rules.py @@ -7,10 +7,11 @@ else: import typing_extensions as typing +from typesense.analytics_rule import AnalyticsRule from typesense.api_call import ApiCall from typesense.types.analytics import ( - AnalyticsRule, AnalyticsRuleCreate, + AnalyticsRuleSchema, AnalyticsRuleUpdate, ) @@ -20,40 +21,42 @@ class AnalyticsRules(object): def __init__(self, api_call: ApiCall) -> None: self.api_call = api_call - self.rules: typing.Dict[str, "AnalyticsRule"] = {} + self.rules: typing.Dict[str, AnalyticsRuleSchema] = {} - def __getitem__(self, rule_name: str) -> "AnalyticsRule": + def __getitem__(self, rule_name: str) -> AnalyticsRuleSchema: if rule_name not in self.rules: - from typesense.analytics_rule import AnalyticsRule as PerRule + self.rules[rule_name] = AnalyticsRule(self.api_call, rule_name) + return self.rules[rule_name] - self.rules[rule_name] = PerRule(self.api_call, rule_name) - return typing.cast("AnalyticsRule", self.rules[rule_name]) - - def create(self, rule: AnalyticsRuleCreate) -> AnalyticsRule: - response: AnalyticsRule = self.api_call.post( + def create(self, rule: AnalyticsRuleCreate) -> AnalyticsRuleSchema: + response: AnalyticsRuleSchema = self.api_call.post( AnalyticsRules.resource_path, body=rule, as_json=True, - entity_type=AnalyticsRule, + entity_type=AnalyticsRuleSchema, ) return response - def retrieve(self, *, rule_tag: typing.Union[str, None] = None) -> typing.List[AnalyticsRule]: + def retrieve( + self, *, rule_tag: typing.Union[str, None] = None + ) -> typing.List[AnalyticsRuleSchema]: params: typing.Dict[str, str] = {} if rule_tag: 
params["rule_tag"] = rule_tag - response: typing.List[AnalyticsRule] = self.api_call.get( + response: typing.List[AnalyticsRuleSchema] = self.api_call.get( AnalyticsRules.resource_path, params=params if params else None, as_json=True, - entity_type=typing.List[AnalyticsRule], + entity_type=typing.List[AnalyticsRuleSchema], ) return response - def upsert(self, rule_name: str, update: AnalyticsRuleUpdate) -> AnalyticsRule: - response: AnalyticsRule = self.api_call.put( + def upsert( + self, rule_name: str, update: AnalyticsRuleUpdate + ) -> AnalyticsRuleSchema: + response: AnalyticsRuleSchema = self.api_call.put( "/".join([AnalyticsRules.resource_path, rule_name]), body=update, - entity_type=AnalyticsRule, + entity_type=AnalyticsRuleSchema, ) - return response \ No newline at end of file + return response diff --git a/src/typesense/types/analytics.py b/src/typesense/types/analytics.py index 5f5d133..b442f7e 100644 --- a/src/typesense/types/analytics.py +++ b/src/typesense/types/analytics.py @@ -51,6 +51,7 @@ class AnalyticsStatus(typing.TypedDict, total=False): # Rules + class AnalyticsRuleParams(typing.TypedDict, total=False): destination_collection: typing.NotRequired[str] limit: typing.NotRequired[int] @@ -76,7 +77,5 @@ class AnalyticsRuleUpdate(typing.TypedDict, total=False): params: AnalyticsRuleParams -class AnalyticsRule(AnalyticsRuleCreate, total=False): +class AnalyticsRuleSchema(AnalyticsRuleCreate, total=False): pass - - From 8e11da7011a518035ac665665723aa797fdb1431 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 29 Oct 2025 13:32:41 +0200 Subject: [PATCH 269/288] fix(synonym_set): fix return type for delete_item method --- src/typesense/synonym_set.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/typesense/synonym_set.py b/src/typesense/synonym_set.py index e00401c..0828791 100644 --- a/src/typesense/synonym_set.py +++ b/src/typesense/synonym_set.py @@ -85,11 +85,10 @@ def upsert_item(self, item_id: str, item: 
SynonymItemSchema) -> SynonymItemSchem ) return response - def delete_item(self, item_id: str) -> typing.Dict[str, str]: + def delete_item(self, item_id: str) -> SynonymItemDeleteSchema: # API returns {"id": "..."} for delete; openapi defines SynonymItemDeleteResponse with name but for items it's id response: SynonymItemDeleteSchema = self.api_call.delete( - "/".join([self._items_path, item_id]), - entity_type=typing.Dict[str, str], + "/".join([self._items_path, item_id]), entity_type=SynonymItemDeleteSchema ) return response From 7a0dcc270097cc34fb111cafd74dddb0c1358b7e Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 29 Oct 2025 13:33:22 +0200 Subject: [PATCH 270/288] test(curation_set): add integration tests for curation set --- tests/curation_set_test.py | 46 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/tests/curation_set_test.py b/tests/curation_set_test.py index d975b4c..46ed37a 100644 --- a/tests/curation_set_test.py +++ b/tests/curation_set_test.py @@ -8,6 +8,7 @@ from tests.utils.version import is_v30_or_above from typesense.client import Client from typesense.curation_set import CurationSet +from typesense.curation_sets import CurationSets from typesense.types.curation_set import ( CurationItemDeleteSchema, CurationItemSchema, @@ -121,3 +122,48 @@ def test_delete_item(fake_curation_set: CurationSet) -> None: assert res == json_response +def test_actual_retrieve( + actual_curation_sets: CurationSets, + delete_all_curation_sets: None, + create_curation_set: None, +) -> None: + """Test that the CurationSet object can retrieve a curation set from Typesense Server.""" + response = actual_curation_sets["products"].retrieve() + + assert response == { + "items": [ + { + "excludes": [ + { + "id": "999", + }, + ], + "filter_curated_hits": False, + "id": "rule-1", + "includes": [ + { + "id": "123", + "position": 1, + }, + ], + "remove_matched_tokens": False, + "rule": { + "match": "contains", + "query": "shoe", + 
}, + "stop_processing": True, + }, + ], + "name": "products", + } + + +def test_actual_delete( + actual_curation_sets: CurationSets, + create_curation_set: None, +) -> None: + """Test that the CurationSet object can delete a curation set from Typesense Server.""" + response = actual_curation_sets["products"].delete() + + print(response) + assert response == {"name": "products"} From 972b9a139c83f1d4b1ca2a8c62f4c4b5d32e19b1 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 29 Oct 2025 13:33:32 +0200 Subject: [PATCH 271/288] test(curation_sets): add integration tests for curation sets --- tests/curation_sets_test.py | 63 +++++++++++++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) diff --git a/tests/curation_sets_test.py b/tests/curation_sets_test.py index 5f4a270..1d7d92a 100644 --- a/tests/curation_sets_test.py +++ b/tests/curation_sets_test.py @@ -8,6 +8,7 @@ from tests.utils.object_assertions import ( assert_match_object, assert_object_lists_match, + assert_to_contain_object, ) from tests.utils.version import is_v30_or_above from typesense.api_call import ApiCall @@ -110,3 +111,65 @@ def test_upsert(fake_curation_sets: CurationSets) -> None: assert mock.last_request.json() == payload +def test_actual_upsert( + actual_curation_sets: CurationSets, + delete_all_curation_sets: None, +) -> None: + """Test that the CurationSets object can upsert a curation set on Typesense Server.""" + response = actual_curation_sets.upsert( + "products", + { + "items": [ + { + "id": "rule-1", + "rule": {"query": "shoe", "match": "contains"}, + "includes": [{"id": "123", "position": 1}], + "excludes": [{"id": "999"}], + } + ] + }, + ) + + assert response == { + "items": [ + { + "excludes": [ + { + "id": "999", + }, + ], + "filter_curated_hits": False, + "id": "rule-1", + "includes": [ + { + "id": "123", + "position": 1, + }, + ], + "remove_matched_tokens": False, + "rule": { + "match": "contains", + "query": "shoe", + }, + "stop_processing": True, + }, + ], + 
"name": "products", + } + + +def test_actual_retrieve( + actual_curation_sets: CurationSets, + delete_all_curation_sets: None, + create_curation_set: None, +) -> None: + """Test that the CurationSets object can retrieve curation sets from Typesense Server.""" + response = actual_curation_sets.retrieve() + + assert isinstance(response, list) + assert_to_contain_object( + response[0], + { + "name": "products", + }, + ) From 0861103b0b97c19b97444a90937f913912a51a7b Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 29 Oct 2025 13:41:12 +0200 Subject: [PATCH 272/288] ci: upgrade typesense version to v30 on ci --- .github/workflows/test-and-lint.yml | 30 ++++++------ tests/analytics_events_test.py | 48 ++++++++++---------- tests/analytics_rule_test.py | 13 +++--- tests/analytics_rule_v1_test.py | 20 ++++---- tests/analytics_rules_test.py | 13 +++--- tests/analytics_rules_v1_test.py | 19 ++++---- tests/analytics_test.py | 13 +++++- tests/analytics_v1_test.py | 15 ++++-- tests/api_call_test.py | 8 ++-- tests/collections_test.py | 6 +-- tests/curation_set_test.py | 5 +- tests/curation_sets_test.py | 9 +--- tests/fixtures/analytics_fixtures.py | 3 +- tests/fixtures/analytics_rule_v1_fixtures.py | 2 - tests/fixtures/curation_set_fixtures.py | 2 - tests/fixtures/synonym_set_fixtures.py | 2 - tests/metrics_test.py | 2 +- tests/override_test.py | 10 ++-- tests/overrides_test.py | 11 +++-- tests/synonym_set_items_test.py | 6 +-- tests/synonym_set_test.py | 14 ++---- tests/synonym_sets_test.py | 10 +--- tests/synonym_test.py | 4 +- tests/synonyms_test.py | 4 +- tests/utils/version.py | 2 - 25 files changed, 131 insertions(+), 140 deletions(-) diff --git a/.github/workflows/test-and-lint.yml b/.github/workflows/test-and-lint.yml index 678254e..9552400 100644 --- a/.github/workflows/test-and-lint.yml +++ b/.github/workflows/test-and-lint.yml @@ -12,22 +12,24 @@ jobs: strategy: matrix: python-version: ["3.9", "3.10", "3.11", "3.12"] - services: - typesense: - image: 
typesense/typesense:28.0 - ports: - - 8108:8108 - volumes: - - /tmp/typesense-data:/data - - /tmp/typesense-analytics:/analytics - env: - TYPESENSE_API_KEY: xyz - TYPESENSE_DATA_DIR: /data - TYPESENSE_ENABLE_CORS: true - TYPESENSE_ANALYTICS_DIR: /analytics - TYPESENSE_ENABLE_SEARCH_ANALYTICS: true steps: + - name: Start Typesense + run: | + docker run -d \ + -p 8108:8108 \ + --name typesense \ + -v /tmp/typesense-data:/data \ + -v /tmp/typesense-analytics-data:/analytics-data \ + typesense/typesense:30.0.alpha1 \ + --api-key=xyz \ + --data-dir=/data \ + --enable-search-analytics=true \ + --analytics-dir=/analytics-data \ + --analytics-flush-interval=60 \ + --analytics-minute-rate-limit=50 \ + --enable-cors + - name: Wait for Typesense run: | timeout 20 bash -c 'while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' localhost:8108/health)" != "200" ]]; do sleep 1; done' || false diff --git a/tests/analytics_events_test.py b/tests/analytics_events_test.py index 81af690..34243ba 100644 --- a/tests/analytics_events_test.py +++ b/tests/analytics_events_test.py @@ -1,27 +1,33 @@ """Tests for Analytics events endpoints (client.analytics.events).""" + from __future__ import annotations import pytest +import requests_mock from tests.utils.version import is_v30_or_above from typesense.client import Client -import requests_mock - from typesense.types.analytics import AnalyticsEvent - pytestmark = pytest.mark.skipif( not is_v30_or_above( - Client({ - "api_key": "xyz", - "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], - }) + Client( + { + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + } + ) ), reason="Run analytics events tests only on v30+", ) -def test_actual_create_event(actual_client: Client, delete_all: None, create_collection: None, delete_all_analytics_rules: None) -> None: +def test_actual_create_event( + actual_client: Client, + delete_all: None, + create_collection: None, + delete_all_analytics_rules: None, 
+) -> None: actual_client.analytics.rules.create( { "name": "company_analytics_rule", @@ -61,7 +67,9 @@ def test_status(actual_client: Client, delete_all: None) -> None: assert isinstance(status, dict) -def test_retrieve_events(actual_client: Client, delete_all: None, delete_all_analytics_rules: None) -> None: +def test_retrieve_events( + actual_client: Client, delete_all: None, delete_all_analytics_rules: None +) -> None: actual_client.analytics.rules.create( { "name": "company_analytics_rule", @@ -89,19 +97,12 @@ def test_retrieve_events(actual_client: Client, delete_all: None, delete_all_ana assert "events" in result - -def test_retrieve_events(fake_client: Client) -> None: - with requests_mock.Mocker() as mock: - mock.get( - "http://nearest:8108/analytics/events", - json={"events": [{"name": "company_analytics_rule"}]}, - ) - result = fake_client.analytics.events.retrieve( - user_id="user-1", name="company_analytics_rule", n=10 - ) - assert "events" in result - -def test_acutal_retrieve_events(actual_client: Client, delete_all: None, create_collection: None, delete_all_analytics_rules: None) -> None: +def test_acutal_retrieve_events( + actual_client: Client, + delete_all: None, + create_collection: None, + delete_all_analytics_rules: None, +) -> None: actual_client.analytics.rules.create( { "name": "company_analytics_rule", @@ -126,6 +127,7 @@ def test_acutal_retrieve_events(actual_client: Client, delete_all: None, create_ ) assert "events" in result + def test_acutal_flush(actual_client: Client, delete_all: None) -> None: resp = actual_client.analytics.events.flush() assert resp["ok"] in [True, False] @@ -136,5 +138,3 @@ def test_flush(fake_client: Client) -> None: mock.post("http://nearest:8108/analytics/flush", json={"ok": True}) resp = fake_client.analytics.events.flush() assert resp["ok"] is True - - diff --git a/tests/analytics_rule_test.py b/tests/analytics_rule_test.py 
index 68b9122..199e7ae 100644 --- a/tests/analytics_rule_test.py +++ b/tests/analytics_rule_test.py @@ -1,4 +1,5 @@ """Unit tests for per-rule AnalyticsRule operations.""" + from __future__ import annotations import pytest @@ -12,10 +13,12 @@ pytestmark = pytest.mark.skipif( not is_v30_or_above( - Client({ - "api_key": "xyz", - "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], - }) + Client( + { + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + } + ) ), reason="Run analytics tests only on v30+", ) @@ -63,5 +66,3 @@ def test_actual_rule_delete( ) -> None: resp = actual_analytics_rules["company_analytics_rule"].delete() assert resp["name"] == "company_analytics_rule" - - diff --git a/tests/analytics_rule_v1_test.py b/tests/analytics_rule_v1_test.py index 4e3534c..d30b002 100644 --- a/tests/analytics_rule_v1_test.py +++ b/tests/analytics_rule_v1_test.py @@ -1,4 +1,5 @@ """Tests for the AnalyticsRuleV1 class.""" + from __future__ import annotations import pytest @@ -14,14 +15,17 @@ pytestmark = pytest.mark.skipif( is_v30_or_above( - Client({ - "api_key": "xyz", - "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], - }) + Client( + { + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + } + ) ), - reason="Skip AnalyticsV1 tests on v30+" + reason="Skip AnalyticsV1 tests on v30+", ) + def test_init(fake_api_call: ApiCall) -> None: """Test that the AnalyticsRuleV1 object is initialized correctly.""" analytics_rule = AnalyticsRuleV1(fake_api_call, "company_analytics_rule") @@ -42,7 +46,6 @@ def test_init(fake_api_call: ApiCall) -> None: ) - def test_retrieve(fake_analytics_rule: AnalyticsRuleV1) -> None: """Test that the AnalyticsRuleV1 object can retrieve an analytics_rule.""" json_response: RuleSchemaForQueries = { @@ -73,7 +76,6 @@ def test_retrieve(fake_analytics_rule: AnalyticsRuleV1) -> None: assert response == json_response - def 
test_delete(fake_analytics_rule: AnalyticsRuleV1) -> None: """Test that the AnalyticsRuleV1 object can delete an analytics_rule.""" json_response: RuleDeleteSchema = { @@ -96,7 +98,6 @@ def test_delete(fake_analytics_rule: AnalyticsRuleV1) -> None: assert response == json_response - def test_actual_retrieve( actual_analytics_rules: AnalyticsRulesV1, delete_all: None, @@ -119,7 +120,6 @@ def test_actual_retrieve( assert response == expected - def test_actual_delete( actual_analytics_rules: AnalyticsRulesV1, delete_all: None, @@ -133,5 +133,3 @@ def test_actual_delete( "name": "company_analytics_rule", } assert response == expected - - diff --git a/tests/analytics_rules_test.py b/tests/analytics_rules_test.py index 81fce0b..70f16f5 100644 --- a/tests/analytics_rules_test.py +++ b/tests/analytics_rules_test.py @@ -1,4 +1,5 @@ """Tests for v30 Analytics Rules endpoints (client.analytics.rules).""" + from __future__ import annotations import pytest @@ -13,10 +14,12 @@ pytestmark = pytest.mark.skipif( not is_v30_or_above( - Client({ - "api_key": "xyz", - "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], - }) + Client( + { + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + } + ) ), reason="Run v30 analytics tests only on v30+", ) @@ -130,5 +133,3 @@ def test_actual_retrieve( rules = actual_analytics_rules.retrieve() assert isinstance(rules, list) assert any(r.get("name") == "company_analytics_rule" for r in rules) - - diff --git a/tests/analytics_rules_v1_test.py b/tests/analytics_rules_v1_test.py index 6ea2d91..7eb2749 100644 --- a/tests/analytics_rules_v1_test.py +++ b/tests/analytics_rules_v1_test.py @@ -1,4 +1,5 @@ """Tests for the AnalyticsRulesV1 class.""" + from __future__ import annotations import pytest @@ -17,14 +18,17 @@ pytestmark = pytest.mark.skipif( is_v30_or_above( - Client({ - "api_key": "xyz", - "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], - }) + Client( + { + "api_key": 
"xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + } + ) ), - reason="Skip AnalyticsV1 tests on v30+" + reason="Skip AnalyticsV1 tests on v30+", ) + def test_init(fake_api_call: ApiCall) -> None: """Test that the AnalyticsRulesV1 object is initialized correctly.""" analytics_rules = AnalyticsRulesV1(fake_api_call) @@ -150,7 +154,6 @@ def test_create(fake_analytics_rules: AnalyticsRulesV1) -> None: } - def test_actual_create( actual_analytics_rules: AnalyticsRulesV1, delete_all: None, @@ -182,7 +185,6 @@ def test_actual_create( } - def test_actual_update( actual_analytics_rules: AnalyticsRulesV1, delete_all: None, @@ -213,7 +215,6 @@ def test_actual_update( } - def test_actual_retrieve( actual_analytics_rules: AnalyticsRulesV1, delete_all: None, @@ -235,5 +236,3 @@ def test_actual_retrieve( "type": "nohits_queries", }, ) - - diff --git a/tests/analytics_test.py b/tests/analytics_test.py index a7e2276..2ff12b6 100644 --- a/tests/analytics_test.py +++ b/tests/analytics_test.py @@ -1,4 +1,5 @@ """Tests for the AnalyticsV1 class.""" + import pytest from tests.utils.version import is_v30_or_above from typesense.client import Client @@ -7,7 +8,17 @@ from typesense.api_call import ApiCall -@pytest.mark.skipif(not is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") +@pytest.mark.skipif( + not is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + } + ) + ), + reason="Skip AnalyticsV1 tests on v30+", +) def test_init(fake_api_call: ApiCall) -> None: """Test that the AnalyticsV1 object is initialized correctly.""" analytics = Analytics(fake_api_call) diff --git a/tests/analytics_v1_test.py b/tests/analytics_v1_test.py index 50b9339..f617b7b 100644 --- a/tests/analytics_v1_test.py +++ b/tests/analytics_v1_test.py @@ -1,4 +1,5 @@ """Tests for the AnalyticsV1 class.""" + import pytest 
from tests.utils.version import is_v30_or_above from typesense.client import Client @@ -7,7 +8,17 @@ from typesense.api_call import ApiCall -@pytest.mark.skipif(is_v30_or_above(Client({"api_key": "xyz", "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}]})), reason="Skip AnalyticsV1 tests on v30+") +@pytest.mark.skipif( + is_v30_or_above( + Client( + { + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + } + ) + ), + reason="Skip AnalyticsV1 tests on v30+", +) def test_init(fake_api_call: ApiCall) -> None: """Test that the AnalyticsV1 object is initialized correctly.""" analytics = AnalyticsV1(fake_api_call) @@ -23,5 +34,3 @@ def test_init(fake_api_call: ApiCall) -> None: ) assert not analytics.rules.rules - - diff --git a/tests/api_call_test.py b/tests/api_call_test.py index e13c056..96acadf 100644 --- a/tests/api_call_test.py +++ b/tests/api_call_test.py @@ -100,7 +100,7 @@ def test_get_error_message_with_invalid_json() -> None: response.status_code = 400 # Set an invalid JSON string that would cause JSONDecodeError response._content = b'{"message": "Error occurred", "details": {"key": "value"' - + error_message = RequestHandler._get_error_message(response) assert "API error: Invalid JSON response:" in error_message assert '{"message": "Error occurred", "details": {"key": "value"' in error_message @@ -112,7 +112,7 @@ def test_get_error_message_with_valid_json() -> None: response.headers["Content-Type"] = "application/json" response.status_code = 400 response._content = b'{"message": "Error occurred", "details": {"key": "value"}}' - + error_message = RequestHandler._get_error_message(response) assert error_message == "Error occurred" @@ -122,8 +122,8 @@ def test_get_error_message_with_non_json_content_type() -> None: response = requests.Response() response.headers["Content-Type"] = "text/plain" response.status_code = 400 - response._content = b'Not a JSON content' - + response._content = b"Not a JSON content" + 
error_message = RequestHandler._get_error_message(response) assert error_message == "API error." diff --git a/tests/collections_test.py b/tests/collections_test.py index 55142ae..d742652 100644 --- a/tests/collections_test.py +++ b/tests/collections_test.py @@ -86,7 +86,7 @@ def test_retrieve(fake_collections: Collections) -> None: "num_documents": 0, "symbols_to_index": [], "token_separators": [], - "synonym_sets": [] + "synonym_sets": [], }, { "created_at": 1619711488, @@ -106,7 +106,7 @@ def test_retrieve(fake_collections: Collections) -> None: "num_documents": 0, "symbols_to_index": [], "token_separators": [], - "synonym_sets": [] + "synonym_sets": [], }, ] with requests_mock.Mocker() as mock: @@ -140,7 +140,7 @@ def test_create(fake_collections: Collections) -> None: "num_documents": 0, "symbols_to_index": [], "token_separators": [], - "synonym_sets": [] + "synonym_sets": [], } with requests_mock.Mocker() as mock: diff --git a/tests/curation_set_test.py b/tests/curation_set_test.py index 46ed37a..d8c4075 100644 --- a/tests/curation_set_test.py +++ b/tests/curation_set_test.py @@ -17,15 +17,12 @@ CurationSetSchema, ) - pytestmark = pytest.mark.skipif( not is_v30_or_above( Client( { "api_key": "xyz", - "nodes": [ - {"host": "localhost", "port": 8108, "protocol": "http"} - ], + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], } ) ), diff --git a/tests/curation_sets_test.py b/tests/curation_sets_test.py index 1d7d92a..82091d5 100644 --- a/tests/curation_sets_test.py +++ b/tests/curation_sets_test.py @@ -16,15 +16,12 @@ from typesense.curation_sets import CurationSets from typesense.types.curation_set import CurationSetSchema, CurationSetUpsertSchema - pytestmark = pytest.mark.skipif( not is_v30_or_above( Client( { "api_key": "xyz", - "nodes": [ - {"host": "localhost", "port": 8108, "protocol": "http"} - ], + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], } ) ), @@ -105,9 +102,7 @@ def test_upsert(fake_curation_sets: 
CurationSets) -> None: assert mock.call_count == 1 assert mock.called is True assert mock.last_request.method == "PUT" - assert ( - mock.last_request.url == "http://nearest:8108/curation_sets/products" - ) + assert mock.last_request.url == "http://nearest:8108/curation_sets/products" assert mock.last_request.json() == payload diff --git a/tests/fixtures/analytics_fixtures.py b/tests/fixtures/analytics_fixtures.py index a95c8b5..9097294 100644 --- a/tests/fixtures/analytics_fixtures.py +++ b/tests/fixtures/analytics_fixtures.py @@ -66,6 +66,7 @@ def fake_analytics_rule_fixture(fake_api_call: ApiCall) -> AnalyticsRule: """Return an AnalyticsRule object with test values.""" return AnalyticsRule(fake_api_call, "company_analytics_rule") + @pytest.fixture(scope="function", name="create_query_collection") def create_query_collection_fixture() -> None: """Create a query collection for analytics rules in the Typesense server.""" @@ -91,4 +92,4 @@ def create_query_collection_fixture() -> None: json=query_collection_data, timeout=3, ) - response.raise_for_status() \ No newline at end of file + response.raise_for_status() diff --git a/tests/fixtures/analytics_rule_v1_fixtures.py b/tests/fixtures/analytics_rule_v1_fixtures.py index 44994eb..0dca1d0 100644 --- a/tests/fixtures/analytics_rule_v1_fixtures.py +++ b/tests/fixtures/analytics_rule_v1_fixtures.py @@ -66,5 +66,3 @@ def actual_analytics_rules_v1_fixture(actual_api_call: ApiCall) -> AnalyticsRule def fake_analytics_rule_v1_fixture(fake_api_call: ApiCall) -> AnalyticsRuleV1: """Return a AnalyticsRule object with test values.""" return AnalyticsRuleV1(fake_api_call, "company_analytics_rule") - - diff --git a/tests/fixtures/curation_set_fixtures.py b/tests/fixtures/curation_set_fixtures.py index 6ab184c..3fc61b5 100644 --- a/tests/fixtures/curation_set_fixtures.py +++ b/tests/fixtures/curation_set_fixtures.py @@ -69,5 +69,3 @@ def 
fake_curation_sets_fixture(fake_api_call: ApiCall) -> CurationSets: def fake_curation_set_fixture(fake_api_call: ApiCall) -> CurationSet: """Return a CurationSet object with test values.""" return CurationSet(fake_api_call, "products") - - diff --git a/tests/fixtures/synonym_set_fixtures.py b/tests/fixtures/synonym_set_fixtures.py index c4c4341..41ad3bb 100644 --- a/tests/fixtures/synonym_set_fixtures.py +++ b/tests/fixtures/synonym_set_fixtures.py @@ -69,5 +69,3 @@ def fake_synonym_sets_fixture(fake_api_call: ApiCall) -> SynonymSets: def fake_synonym_set_fixture(fake_api_call: ApiCall) -> SynonymSet: """Return a SynonymSet object with test values.""" return SynonymSet(fake_api_call, "test-set") - - diff --git a/tests/metrics_test.py b/tests/metrics_test.py index 1e1ea47..01bb9fa 100644 --- a/tests/metrics_test.py +++ b/tests/metrics_test.py @@ -23,4 +23,4 @@ def test_actual_retrieve(actual_metrics: Metrics) -> None: assert "typesense_memory_mapped_bytes" in response assert "typesense_memory_metadata_bytes" in response assert "typesense_memory_resident_bytes" in response - assert "typesense_memory_retained_bytes" in response \ No newline at end of file + assert "typesense_memory_retained_bytes" in response diff --git a/tests/override_test.py b/tests/override_test.py index 0886bc5..eba0dee 100644 --- a/tests/override_test.py +++ b/tests/override_test.py @@ -20,10 +20,12 @@ pytestmark = pytest.mark.skipif( is_v30_or_above( - Client({ - "api_key": "xyz", - "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], - }) + Client( + { + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + } + ) ), reason="Run override tests only on less than v30", ) diff --git a/tests/overrides_test.py b/tests/overrides_test.py index 4593961..e543bea 100644 --- a/tests/overrides_test.py +++ b/tests/overrides_test.py @@ -18,14 +18,17 @@ pytestmark = pytest.mark.skipif( is_v30_or_above( - Client({ - "api_key": "xyz", - "nodes": [{"host": 
"localhost", "port": 8108, "protocol": "http"}], - }) + Client( + { + "api_key": "xyz", + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], + } + ) ), reason="Run override tests only on less than v30", ) + def test_init(fake_api_call: ApiCall) -> None: """Test that the Overrides object is initialized correctly.""" overrides = Overrides(fake_api_call, "companies") diff --git a/tests/synonym_set_items_test.py b/tests/synonym_set_items_test.py index 0fb55d7..2cc1dc6 100644 --- a/tests/synonym_set_items_test.py +++ b/tests/synonym_set_items_test.py @@ -19,9 +19,7 @@ Client( { "api_key": "xyz", - "nodes": [ - {"host": "localhost", "port": 8108, "protocol": "http"} - ], + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], } ) ), @@ -81,5 +79,3 @@ def test_delete_item(fake_synonym_set: SynonymSet) -> None: ) res = fake_synonym_set.delete_item("nike") assert res == json_response - - diff --git a/tests/synonym_set_test.py b/tests/synonym_set_test.py index ee6650d..b64aa5c 100644 --- a/tests/synonym_set_test.py +++ b/tests/synonym_set_test.py @@ -19,9 +19,7 @@ Client( { "api_key": "xyz", - "nodes": [ - {"host": "localhost", "port": 8108, "protocol": "http"} - ], + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], } ) ), @@ -68,8 +66,7 @@ def test_retrieve(fake_synonym_set: SynonymSet) -> None: assert len(mock.request_history) == 1 assert mock.request_history[0].method == "GET" assert ( - mock.request_history[0].url - == "http://nearest:8108/synonym_sets/test-set" + mock.request_history[0].url == "http://nearest:8108/synonym_sets/test-set" ) assert response == json_response @@ -90,8 +87,7 @@ def test_delete(fake_synonym_set: SynonymSet) -> None: assert len(mock.request_history) == 1 assert mock.request_history[0].method == "DELETE" assert ( - mock.request_history[0].url - == 
"http://nearest:8108/synonym_sets/test-set" + mock.request_history[0].url == "http://nearest:8108/synonym_sets/test-set" ) assert response == json_response @@ -112,7 +108,7 @@ def test_actual_retrieve( "root": "", "synonyms": ["companies", "corporations", "firms"], } - ] + ], } @@ -124,5 +120,3 @@ def test_actual_delete( response = actual_synonym_sets["test-set"].delete() assert response == {"name": "test-set"} - - diff --git a/tests/synonym_sets_test.py b/tests/synonym_sets_test.py index 24cea59..fd0e532 100644 --- a/tests/synonym_sets_test.py +++ b/tests/synonym_sets_test.py @@ -25,9 +25,7 @@ Client( { "api_key": "xyz", - "nodes": [ - {"host": "localhost", "port": 8108, "protocol": "http"} - ], + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], } ) ), @@ -109,9 +107,7 @@ def test_create(fake_synonym_sets: SynonymSets) -> None: assert mock.call_count == 1 assert mock.called is True assert mock.last_request.method == "PUT" - assert ( - mock.last_request.url == "http://nearest:8108/synonym_sets/test-set" - ) + assert mock.last_request.url == "http://nearest:8108/synonym_sets/test-set" assert mock.last_request.json() == payload @@ -159,5 +155,3 @@ def test_actual_retrieve( "name": "test-set", }, ) - - diff --git a/tests/synonym_test.py b/tests/synonym_test.py index d25d937..0b2922c 100644 --- a/tests/synonym_test.py +++ b/tests/synonym_test.py @@ -23,9 +23,7 @@ Client( { "api_key": "xyz", - "nodes": [ - {"host": "localhost", "port": 8108, "protocol": "http"} - ], + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], } ) ), diff --git a/tests/synonyms_test.py b/tests/synonyms_test.py index 81ae716..22f8a0c 100644 --- a/tests/synonyms_test.py +++ b/tests/synonyms_test.py @@ -22,9 +22,7 @@ Client( { "api_key": "xyz", - "nodes": [ - {"host": "localhost", "port": 8108, 
"protocol": "http"} - ], + "nodes": [{"host": "localhost", "port": 8108, "protocol": "http"}], } ) ), diff --git a/tests/utils/version.py b/tests/utils/version.py index a7d375c..33b9151 100644 --- a/tests/utils/version.py +++ b/tests/utils/version.py @@ -21,5 +21,3 @@ def is_v30_or_above(client: Client) -> bool: return False except Exception: return False - - From 60fda9587a313ae05c5d010997a2207aa7c505d0 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 29 Oct 2025 13:47:41 +0200 Subject: [PATCH 273/288] fix(test): create the companies collection before creating the rule --- tests/analytics_events_test.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/analytics_events_test.py b/tests/analytics_events_test.py index 34243ba..b970e2c 100644 --- a/tests/analytics_events_test.py +++ b/tests/analytics_events_test.py @@ -70,6 +70,15 @@ def test_status(actual_client: Client, delete_all: None) -> None: def test_retrieve_events( actual_client: Client, delete_all: None, delete_all_analytics_rules: None ) -> None: + actual_client.collections.create( + { + "name": "companies", + "fields": [ + {"name": "user_id", "type": "string"}, + ], + } + ) + actual_client.analytics.rules.create( { "name": "company_analytics_rule", From 29291eb258155ab5c5cbf766dd6a0d67ca35979d Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Wed, 29 Oct 2025 13:49:46 +0200 Subject: [PATCH 274/288] chore(config): use `logger.warning` instead of deprecated `warn` function --- src/typesense/configuration.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/typesense/configuration.py b/src/typesense/configuration.py index d59ac5e..f21b8cb 100644 --- a/src/typesense/configuration.py +++ b/src/typesense/configuration.py @@ -371,7 +371,7 @@ def show_deprecation_warnings(config_dict: ConfigDict) -> None: to check for deprecated fields. 
""" if config_dict.get("timeout_seconds"): - logger.warn( + logger.warning( " ".join( [ "Deprecation warning: timeout_seconds is now renamed", @@ -381,7 +381,7 @@ def show_deprecation_warnings(config_dict: ConfigDict) -> None: ) if config_dict.get("master_node"): - logger.warn( + logger.warning( " ".join( [ "Deprecation warning: master_node is now consolidated", @@ -391,7 +391,7 @@ def show_deprecation_warnings(config_dict: ConfigDict) -> None: ) if config_dict.get("read_replica_nodes"): - logger.warn( + logger.warning( " ".join( [ "Deprecation warning: read_replica_nodes is now", From 4e851d8a9e1a4da9397b66779682eaa390ed3576 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 14 Nov 2025 13:03:48 +0200 Subject: [PATCH 275/288] feat(curation): register curation sets to main client object --- src/typesense/client.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/typesense/client.py b/src/typesense/client.py index 92354b2..88ba60e 100644 --- a/src/typesense/client.py +++ b/src/typesense/client.py @@ -43,6 +43,7 @@ from typesense.collections import Collections from typesense.configuration import ConfigDict, Configuration from typesense.conversations_models import ConversationsModels +from typesense.curation_sets import CurationSets from typesense.debug import Debug from typesense.keys import Keys from typesense.metrics import Metrics @@ -74,6 +75,7 @@ class Client: aliases (Aliases): Instance for managing collection aliases. analyticsV1 (AnalyticsV1): Instance for analytics operations (V1). analytics (AnalyticsV30): Instance for analytics operations (v30). + curation_sets (CurationSets): Instance for Curation Sets (v30+) stemming (Stemming): Instance for stemming dictionary operations. operations (Operations): Instance for various Typesense operations. debug (Debug): Instance for debug operations. 
@@ -107,6 +109,7 @@ def __init__(self, config_dict: ConfigDict) -> None: self.analyticsV1 = AnalyticsV1(self.api_call) self.analytics = Analytics(self.api_call) self.stemming = Stemming(self.api_call) + self.curation_sets = CurationSets(self.api_call) self.operations = Operations(self.api_call) self.debug = Debug(self.api_call) self.stopwords = Stopwords(self.api_call) From ff8f76fc725058a58d50b8d129d3e98ca4e1f081 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 14 Nov 2025 13:04:04 +0200 Subject: [PATCH 276/288] docs(client): update docs for client object --- src/typesense/client.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/typesense/client.py b/src/typesense/client.py index 88ba60e..81f67ca 100644 --- a/src/typesense/client.py +++ b/src/typesense/client.py @@ -74,7 +74,7 @@ class Client: keys (Keys): Instance for managing API keys. aliases (Aliases): Instance for managing collection aliases. analyticsV1 (AnalyticsV1): Instance for analytics operations (V1). - analytics (AnalyticsV30): Instance for analytics operations (v30). + analytics (Analytics): Instance for analytics operations (v30). curation_sets (CurationSets): Instance for Curation Sets (v30+) stemming (Stemming): Instance for stemming dictionary operations. operations (Operations): Instance for various Typesense operations. @@ -95,8 +95,10 @@ def __init__(self, config_dict: ConfigDict) -> None: Example: >>> config = { ... "api_key": "your_api_key", - ... "nodes": [{"host": "localhost", "port": "8108", "protocol": "http"}], - ... "connection_timeout_seconds": 2 + ... "nodes": [ + ... {"host": "localhost", "port": "8108", "protocol": "http"} + ... ], + ... "connection_timeout_seconds": 2, ... } >>> client = Client(config) """ @@ -143,7 +145,6 @@ def typed_collection( >>> class Company(DocumentSchema): ... name: str ... num_employees: int - ... 
>>> client = Client(config) >>> companies_collection = client.typed_collection(model=Company) # This is equivalent to: From d55bcb7f9275578526c688aa7bb44014825451bf Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 14 Nov 2025 15:13:50 +0200 Subject: [PATCH 277/288] chore: add typing-extensions to dependency list --- pyproject.toml | 2 +- uv.lock | 8 ++++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 1f26b2c..56fb095 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ classifiers = [ "Programming Language :: Python", "Programming Language :: Python :: 3", ] -dependencies = ["requests"] +dependencies = ["requests", "typing-extensions"] dynamic = ["version"] [project.urls] diff --git a/uv.lock b/uv.lock index 0846166..376f24b 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 2 +revision = 3 requires-python = ">=3.9" resolution-markers = [ "python_full_version >= '3.10'", @@ -440,6 +440,7 @@ name = "typesense" source = { virtual = "." 
} dependencies = [ { name = "requests" }, + { name = "typing-extensions" }, ] [package.dev-dependencies] @@ -457,7 +458,10 @@ dev = [ ] [package.metadata] -requires-dist = [{ name = "requests" }] +requires-dist = [ + { name = "requests" }, + { name = "typing-extensions" }, +] [package.metadata.requires-dev] dev = [ From ffe325f9b7e8873c4747f52f85993ef78ed1a180 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 14 Nov 2025 15:15:17 +0200 Subject: [PATCH 278/288] feat: add suppress_deprecation_warnings configuration option - add suppress_deprecation_warnings field to configdict typeddict - initialize suppress_deprecation_warnings in configuration class - default to false to maintain existing warning behavior --- src/typesense/configuration.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/typesense/configuration.py b/src/typesense/configuration.py index f21b8cb..d82408d 100644 --- a/src/typesense/configuration.py +++ b/src/typesense/configuration.py @@ -80,6 +80,8 @@ class ConfigDict(typing.TypedDict): dictionaries or URLs that represent the read replica nodes. connection_timeout_seconds (float): The connection timeout in seconds. + + suppress_deprecation_warnings (bool): Whether to suppress deprecation warnings. 
""" nodes: typing.List[typing.Union[str, NodeConfigDict]] @@ -96,6 +98,7 @@ class ConfigDict(typing.TypedDict): typing.List[typing.Union[str, NodeConfigDict]] ] # deprecated connection_timeout_seconds: typing.NotRequired[float] + suppress_deprecation_warnings: typing.NotRequired[bool] class Node: @@ -220,6 +223,7 @@ def __init__( ) self.verify = config_dict.get("verify", True) self.additional_headers = config_dict.get("additional_headers", {}) + self.suppress_deprecation_warnings = config_dict.get("suppress_deprecation_warnings", False) def _handle_nearest_node( self, From d89728c46b7ed53f137cde91536e0031c0dfb8d1 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 14 Nov 2025 15:14:36 +0200 Subject: [PATCH 279/288] feat: add deprecation warning decorator system - add warn_deprecation decorator for method deprecation warnings - track shown warnings to prevent duplicate messages - support configurable suppression via suppress_deprecation_warnings - integrate with apicall configuration for warning control --- src/typesense/logger.py | 72 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 72 insertions(+) diff --git a/src/typesense/logger.py b/src/typesense/logger.py index 1be7890..2834e28 100644 --- a/src/typesense/logger.py +++ b/src/typesense/logger.py @@ -1,6 +1,78 @@ """Logging configuration for the Typesense Python client.""" +import functools import logging +import sys + +if sys.version_info >= (3, 11): + import typing +else: + import typing_extensions as typing logger = logging.getLogger("typesense") logger.setLevel(logging.WARN) + +_deprecation_warnings: typing.Dict[str, bool] = {} + +if sys.version_info >= (3, 11): + from typing import ParamSpec, TypeVar +else: + from typing_extensions import ParamSpec, TypeVar + +P = ParamSpec("P") +R = TypeVar("R") + + +def warn_deprecation( + message: str, + *, + flag_name: typing.Union[str, None] = None, +) -> typing.Callable[[typing.Callable[P, R]], typing.Callable[P, R]]: + """ + Decorator to warn about 
deprecation when a method is called. + + This decorator will log a deprecation warning once per flag_name when the + decorated method is called. The warning is only shown once to avoid spam. + + Args: + message: The deprecation warning message to display. + flag_name: Optional name for the warning flag. If not provided, a default + name will be generated based on the function's module and name. + + Returns: + A decorator function that wraps the target method. + + Example: + >>> @warn_deprecation("This method is deprecated", flag_name="my_method") + ... def my_method(self): + ... return "result" + """ + + def decorator(func: typing.Callable[P, R]) -> typing.Callable[P, R]: + if flag_name is None: + flag = f"{func.__module__}.{func.__qualname__}" + else: + flag = flag_name + + @functools.wraps(func) + def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: + suppress_warnings = False + if ( + args + and len(args) > 1 + and args[1] + and args[1].__class__.__name__ == "ApiCall" + and hasattr(args[1], "config") + ): + suppress_warnings = getattr( + args[1].config, "suppress_deprecation_warnings", False + ) + + if not suppress_warnings and not _deprecation_warnings.get(flag, False): + logger.warning(f"Deprecation warning: {message}") + _deprecation_warnings[flag] = True + return func(*args, **kwargs) + + return typing.cast(typing.Callable[P, R], wrapper) + + return decorator From e45871d26b1f7207667e1562e54b6feeed2dfd66 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 14 Nov 2025 15:17:57 +0200 Subject: [PATCH 280/288] refactor: migrate deprecated apis to use warn_deprecation decorator - add warn_deprecation decorator to analytics_rule_v1 and analytics_rules_v1 - add warn_deprecation decorator to override and overrides classes - add warn_deprecation decorator to synonym and synonyms classes - remove manual deprecation warning code and global flags - replace manual logger.warning calls with decorator-based warnings --- src/typesense/analytics_rule_v1.py | 5 +++++ 
src/typesense/analytics_rules_v1.py | 6 ++++++ src/typesense/override.py | 6 ++++++ src/typesense/overrides.py | 5 +++++ src/typesense/synonym.py | 20 ++++++-------------- src/typesense/synonyms.py | 20 ++++++-------------- 6 files changed, 34 insertions(+), 28 deletions(-) diff --git a/src/typesense/analytics_rule_v1.py b/src/typesense/analytics_rule_v1.py index dc6890d..e3f8fc0 100644 --- a/src/typesense/analytics_rule_v1.py +++ b/src/typesense/analytics_rule_v1.py @@ -28,6 +28,7 @@ import typing_extensions as typing from typesense.api_call import ApiCall +from typesense.logger import warn_deprecation from typesense.types.analytics_rule_v1 import ( RuleDeleteSchema, RuleSchemaForCounters, @@ -47,6 +48,10 @@ class AnalyticsRuleV1: rule_id (str): The ID of the analytics rule. """ + @warn_deprecation( + "AnalyticsRuleV1 is deprecated on v30+. Use client.analytics.rules[rule_id] instead.", + flag_name="analytics_rules_v1_deprecation", + ) def __init__(self, api_call: ApiCall, rule_id: str): """ Initialize the AnalyticsRuleV1 object. diff --git a/src/typesense/analytics_rules_v1.py b/src/typesense/analytics_rules_v1.py index a850d37..c099726 100644 --- a/src/typesense/analytics_rules_v1.py +++ b/src/typesense/analytics_rules_v1.py @@ -27,6 +27,8 @@ import sys +from typesense.logger import warn_deprecation + if sys.version_info >= (3, 11): import typing else: @@ -63,6 +65,10 @@ class AnalyticsRulesV1(object): resource_path: typing.Final[str] = "/analytics/rules" + @warn_deprecation( + "AnalyticsRulesV1 is deprecated on v30+. Use client.analytics instead.", + flag_name="analytics_rules_v1_deprecation", + ) def __init__(self, api_call: ApiCall): """ Initialize the AnalyticsRulesV1 object. 
diff --git a/src/typesense/override.py b/src/typesense/override.py index 478a6d8..12a700d 100644 --- a/src/typesense/override.py +++ b/src/typesense/override.py @@ -22,6 +22,7 @@ """ from typesense.api_call import ApiCall +from typesense.logger import warn_deprecation from typesense.types.override import OverrideDeleteSchema, OverrideSchema @@ -38,6 +39,11 @@ class Override: override_id (str): The ID of the override. """ + @warn_deprecation( + "The override API (collections/{collection}/overrides/{override_id}) is deprecated is removed on v30+. " + "Use curation sets (curation_sets) instead.", + flag_name="overrides_deprecation", + ) def __init__( self, api_call: ApiCall, diff --git a/src/typesense/overrides.py b/src/typesense/overrides.py index 2674f42..d0d7941 100644 --- a/src/typesense/overrides.py +++ b/src/typesense/overrides.py @@ -30,6 +30,7 @@ import sys from typesense.api_call import ApiCall +from typesense.logger import warn_deprecation from typesense.override import Override from typesense.types.override import ( OverrideCreateSchema, @@ -59,6 +60,10 @@ class Overrides: resource_path: typing.Final[str] = "overrides" + @warn_deprecation( + "Overrides is deprecated on v30+. Use client.curation_sets instead.", + flag_name="overrides_deprecation", + ) def __init__( self, api_call: ApiCall, diff --git a/src/typesense/synonym.py b/src/typesense/synonym.py index 53f9bd3..4119620 100644 --- a/src/typesense/synonym.py +++ b/src/typesense/synonym.py @@ -22,11 +22,9 @@ """ from typesense.api_call import ApiCall -from typesense.logger import logger +from typesense.logger import warn_deprecation from typesense.types.synonym import SynonymDeleteSchema, SynonymSchema -_synonym_deprecation_warned = False - class Synonym: """ @@ -41,6 +39,11 @@ class Synonym: synonym_id (str): The ID of the synonym. """ + @warn_deprecation( + "The synonym API (collections/{collection}/synonyms/{synonym_id}) is deprecated is removed on v30+. 
" + "Use synonym sets (synonym_sets) instead.", + flag_name="synonyms_deprecation", + ) def __init__( self, api_call: ApiCall, @@ -66,7 +69,6 @@ def retrieve(self) -> SynonymSchema: Returns: SynonymSchema: The schema containing the synonym details. """ - self._maybe_warn_deprecation() return self.api_call.get(self._endpoint_path(), entity_type=SynonymSchema) def delete(self) -> SynonymDeleteSchema: @@ -76,7 +78,6 @@ def delete(self) -> SynonymDeleteSchema: Returns: SynonymDeleteSchema: The schema containing the deletion response. """ - self._maybe_warn_deprecation() return self.api_call.delete( self._endpoint_path(), entity_type=SynonymDeleteSchema, @@ -100,12 +101,3 @@ def _endpoint_path(self) -> str: self.synonym_id, ], ) - - def _maybe_warn_deprecation(self) -> None: - global _synonym_deprecation_warned - if not _synonym_deprecation_warned: - logger.warning( - "The synonyms API (collections/{collection}/synonyms) is deprecated and will be " - "removed in a future release. Use synonym sets (synonym_sets) instead." - ) - _synonym_deprecation_warned = True diff --git a/src/typesense/synonyms.py b/src/typesense/synonyms.py index c1bd6b7..6660984 100644 --- a/src/typesense/synonyms.py +++ b/src/typesense/synonyms.py @@ -28,15 +28,13 @@ import sys from typesense.api_call import ApiCall +from typesense.logger import warn_deprecation from typesense.synonym import Synonym from typesense.types.synonym import ( SynonymCreateSchema, SynonymSchema, SynonymsRetrieveSchema, ) -from typesense.logger import logger - -_synonyms_deprecation_warned = False if sys.version_info >= (3, 11): import typing @@ -60,6 +58,11 @@ class Synonyms: resource_path: typing.Final[str] = "synonyms" + @warn_deprecation( + "The synonyms API (collections/{collection}/synonyms) is deprecated is removed on v30+. " + "Use synonym sets (synonym_sets) instead.", + flag_name="synonyms_deprecation", + ) def __init__(self, api_call: ApiCall, collection_name: str): """ Initialize the Synonyms object. 
@@ -101,7 +104,6 @@ def upsert(self, synonym_id: str, schema: SynonymCreateSchema) -> SynonymSchema: Returns: SynonymSchema: The created or updated synonym. """ - self._maybe_warn_deprecation() response = self.api_call.put( self._endpoint_path(synonym_id), body=schema, @@ -116,7 +118,6 @@ def retrieve(self) -> SynonymsRetrieveSchema: Returns: SynonymsRetrieveSchema: The schema containing all synonyms. """ - self._maybe_warn_deprecation() response = self.api_call.get( self._endpoint_path(), entity_type=SynonymsRetrieveSchema, @@ -144,12 +145,3 @@ def _endpoint_path(self, synonym_id: typing.Union[str, None] = None) -> str: synonym_id, ], ) - - def _maybe_warn_deprecation(self) -> None: - global _synonyms_deprecation_warned - if not _synonyms_deprecation_warned: - logger.warning( - "The synonyms API (collections/{collection}/synonyms) is deprecated and will be " - "removed in a future release. Use synonym sets (synonym_sets) instead." - ) - _synonyms_deprecation_warned = True From d56213051340c94690570ad66d1226db4556dbd8 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 14 Nov 2025 15:19:02 +0200 Subject: [PATCH 281/288] feat: add typing deprecation decorators to deprecated apis - add @deprecated decorator to analytics_rule_v1, analytics_v1 classes - add @deprecated decorator to override, overrides, synonyms classes - convert analyticsV1 to private attribute with deprecated property - convert overrides and synonyms to private attributes with deprecated properties - remove manual deprecation warning code from analytics_v1 - enable static type checker warnings for deprecated apis --- src/typesense/analytics_rule_v1.py | 9 ++++++--- src/typesense/analytics_rules_v1.py | 4 +--- src/typesense/analytics_v1.py | 13 +++---------- src/typesense/client.py | 14 ++++++++++++-- src/typesense/collection.py | 22 ++++++++++++++++++++-- src/typesense/override.py | 5 ++++- src/typesense/overrides.py | 7 +++++-- src/typesense/synonym.py | 2 +- src/typesense/synonyms.py | 7 
+++++-- 9 files changed, 57 insertions(+), 26 deletions(-) diff --git a/src/typesense/analytics_rule_v1.py b/src/typesense/analytics_rule_v1.py index e3f8fc0..87a156d 100644 --- a/src/typesense/analytics_rule_v1.py +++ b/src/typesense/analytics_rule_v1.py @@ -27,6 +27,8 @@ else: import typing_extensions as typing +from typing_extensions import deprecated + from typesense.api_call import ApiCall from typesense.logger import warn_deprecation from typesense.types.analytics_rule_v1 import ( @@ -36,6 +38,9 @@ ) +@deprecated( + "AnalyticsRuleV1 is deprecated on v30+. Use client.analytics.rules[rule_id] instead." +) class AnalyticsRuleV1: """ Class for managing individual analytics rules in Typesense (V1). @@ -48,7 +53,7 @@ class AnalyticsRuleV1: rule_id (str): The ID of the analytics rule. """ - @warn_deprecation( + @warn_deprecation( # type: ignore[misc] "AnalyticsRuleV1 is deprecated on v30+. Use client.analytics.rules[rule_id] instead.", flag_name="analytics_rules_v1_deprecation", ) @@ -107,5 +112,3 @@ def _endpoint_path(self) -> str: from typesense.analytics_rules_v1 import AnalyticsRulesV1 return "/".join([AnalyticsRulesV1.resource_path, self.rule_id]) - - diff --git a/src/typesense/analytics_rules_v1.py b/src/typesense/analytics_rules_v1.py index c099726..2c93a98 100644 --- a/src/typesense/analytics_rules_v1.py +++ b/src/typesense/analytics_rules_v1.py @@ -65,7 +65,7 @@ class AnalyticsRulesV1(object): resource_path: typing.Final[str] = "/analytics/rules" - @warn_deprecation( + @warn_deprecation( # type: ignore[misc] "AnalyticsRulesV1 is deprecated on v30+. 
Use client.analytics instead.", flag_name="analytics_rules_v1_deprecation", ) @@ -167,5 +167,3 @@ def retrieve(self) -> RulesRetrieveSchema: entity_type=RulesRetrieveSchema, ) return response - - diff --git a/src/typesense/analytics_v1.py b/src/typesense/analytics_v1.py index cbacc4b..657af6c 100644 --- a/src/typesense/analytics_v1.py +++ b/src/typesense/analytics_v1.py @@ -17,13 +17,13 @@ versions through the use of the typing_extensions library. """ +from typing_extensions import deprecated + from typesense.analytics_rules_v1 import AnalyticsRulesV1 from typesense.api_call import ApiCall -from typesense.logger import logger - -_analytics_v1_deprecation_warned = False +@deprecated("AnalyticsV1 is deprecated on v30+. Use client.analytics instead.") class AnalyticsV1(object): """ Class for managing analytics in Typesense (V1). @@ -46,13 +46,6 @@ def __init__(self, api_call: ApiCall) -> None: @property def rules(self) -> AnalyticsRulesV1: - global _analytics_v1_deprecation_warned - if not _analytics_v1_deprecation_warned: - logger.warning( - "AnalyticsV1 is deprecated and will be removed in a future release. " - "Use client.analytics instead." 
- ) - _analytics_v1_deprecation_warned = True return self._rules diff --git a/src/typesense/client.py b/src/typesense/client.py index 81f67ca..19cae3a 100644 --- a/src/typesense/client.py +++ b/src/typesense/client.py @@ -28,6 +28,8 @@ import sys +from typing_extensions import deprecated + from typesense.types.document import DocumentSchema if sys.version_info >= (3, 11): @@ -36,8 +38,8 @@ import typing_extensions as typing from typesense.aliases import Aliases -from typesense.analytics_v1 import AnalyticsV1 from typesense.analytics import Analytics +from typesense.analytics_v1 import AnalyticsV1 from typesense.api_call import ApiCall from typesense.collection import Collection from typesense.collections import Collections @@ -108,7 +110,7 @@ def __init__(self, config_dict: ConfigDict) -> None: self.multi_search = MultiSearch(self.api_call) self.keys = Keys(self.api_call) self.aliases = Aliases(self.api_call) - self.analyticsV1 = AnalyticsV1(self.api_call) + self._analyticsV1 = AnalyticsV1(self.api_call) self.analytics = Analytics(self.api_call) self.stemming = Stemming(self.api_call) self.curation_sets = CurationSets(self.api_call) @@ -120,6 +122,14 @@ def __init__(self, config_dict: ConfigDict) -> None: self.conversations_models = ConversationsModels(self.api_call) self.nl_search_models = NLSearchModels(self.api_call) + @property + @deprecated( + "AnalyticsV1 is deprecated on v30+. 
Use client.analytics instead.", + category=None, + ) + def analyticsV1(self) -> AnalyticsV1: + return self._analyticsV1 + def typed_collection( self, *, diff --git a/src/typesense/collection.py b/src/typesense/collection.py index f648ebf..a898656 100644 --- a/src/typesense/collection.py +++ b/src/typesense/collection.py @@ -20,6 +20,8 @@ import sys +from typing_extensions import deprecated + from typesense.types.collection import CollectionSchema, CollectionUpdateSchema if sys.version_info >= (3, 11): @@ -63,8 +65,24 @@ def __init__(self, api_call: ApiCall, name: str): self.name = name self.api_call = api_call self.documents: Documents[TDoc] = Documents(api_call, name) - self.overrides = Overrides(api_call, name) - self.synonyms = Synonyms(api_call, name) + self._overrides = Overrides(api_call, name) + self._synonyms = Synonyms(api_call, name) + + @property + @deprecated( + "Synonyms is deprecated on v30+. Use client.synonym_sets instead.", + category=None, + ) + def synonyms(self) -> Synonyms: + return self._synonyms + + @property + @deprecated( + "Overrides is deprecated on v30+. Use client.curation_sets instead.", + category=None, + ) + def overrides(self) -> Overrides: + return self._overrides def retrieve(self) -> CollectionSchema: """ diff --git a/src/typesense/override.py b/src/typesense/override.py index 12a700d..a9613b0 100644 --- a/src/typesense/override.py +++ b/src/typesense/override.py @@ -21,11 +21,14 @@ versions through the use of the typing_extensions library. """ +from typing_extensions import deprecated + from typesense.api_call import ApiCall from typesense.logger import warn_deprecation from typesense.types.override import OverrideDeleteSchema, OverrideSchema +@deprecated("Override is deprecated on v30+. Use client.curation_sets instead.") class Override: """ Class for managing individual overrides in a Typesense collection. @@ -39,7 +42,7 @@ class Override: override_id (str): The ID of the override. 
""" - @warn_deprecation( + @warn_deprecation( # type: ignore[misc] "The override API (collections/{collection}/overrides/{override_id}) is deprecated is removed on v30+. " "Use curation sets (curation_sets) instead.", flag_name="overrides_deprecation", diff --git a/src/typesense/overrides.py b/src/typesense/overrides.py index d0d7941..4f8bc80 100644 --- a/src/typesense/overrides.py +++ b/src/typesense/overrides.py @@ -29,6 +29,8 @@ import sys +from typing_extensions import deprecated + from typesense.api_call import ApiCall from typesense.logger import warn_deprecation from typesense.override import Override @@ -44,6 +46,7 @@ import typing_extensions as typing +@deprecated("Overrides is deprecated on v30+. Use client.curation_sets instead.") class Overrides: """ Class for managing overrides in a Typesense collection. @@ -60,7 +63,7 @@ class Overrides: resource_path: typing.Final[str] = "overrides" - @warn_deprecation( + @warn_deprecation( # type: ignore[misc] "Overrides is deprecated on v30+. Use client.curation_sets instead.", flag_name="overrides_deprecation", ) @@ -68,7 +71,7 @@ def __init__( self, api_call: ApiCall, collection_name: str, - ) -> None: + ) -> None: """ Initialize the Overrides object. diff --git a/src/typesense/synonym.py b/src/typesense/synonym.py index 4119620..6bea97d 100644 --- a/src/typesense/synonym.py +++ b/src/typesense/synonym.py @@ -39,7 +39,7 @@ class Synonym: synonym_id (str): The ID of the synonym. """ - @warn_deprecation( + @warn_deprecation( # type: ignore[misc] "The synonym API (collections/{collection}/synonyms/{synonym_id}) is deprecated is removed on v30+. 
" "Use synonym sets (synonym_sets) instead.", flag_name="synonyms_deprecation", diff --git a/src/typesense/synonyms.py b/src/typesense/synonyms.py index 6660984..3a5622f 100644 --- a/src/typesense/synonyms.py +++ b/src/typesense/synonyms.py @@ -27,6 +27,8 @@ import sys +from typing_extensions import deprecated + from typesense.api_call import ApiCall from typesense.logger import warn_deprecation from typesense.synonym import Synonym @@ -42,6 +44,7 @@ import typing_extensions as typing +@deprecated("Synonyms is deprecated on v30+. Use client.synonym_sets instead.") class Synonyms: """ Class for managing synonyms in a Typesense collection. @@ -58,12 +61,12 @@ class Synonyms: resource_path: typing.Final[str] = "synonyms" - @warn_deprecation( + @warn_deprecation( # type: ignore[misc] "The synonyms API (collections/{collection}/synonyms) is deprecated is removed on v30+. " "Use synonym sets (synonym_sets) instead.", flag_name="synonyms_deprecation", ) - def __init__(self, api_call: ApiCall, collection_name: str): + def __init__(self, api_call: ApiCall, collection_name: str) -> None: """ Initialize the Synonyms object. 
From 2792b34e056d130c275380a42947beaf940b0a43 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 14 Nov 2025 16:12:13 +0200 Subject: [PATCH 282/288] fix(types): fix update collection type to include curation & synonyms --- src/typesense/types/collection.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/src/typesense/types/collection.py b/src/typesense/types/collection.py index 1ce839c..702fb41 100644 --- a/src/typesense/types/collection.py +++ b/src/typesense/types/collection.py @@ -225,10 +225,15 @@ class CollectionUpdateSchema(typing.TypedDict): """ - fields: typing.List[ - typing.Union[ - RegularCollectionFieldSchema, - ReferenceCollectionFieldSchema, - DropCollectionFieldSchema, + fields: typing.NotRequired[ + typing.List[ + typing.Union[ + RegularCollectionFieldSchema, + ReferenceCollectionFieldSchema, + DropCollectionFieldSchema, + ] ] ] + synonym_sets: typing.NotRequired[typing.List[str]] + curation_sets: typing.NotRequired[typing.List[str]] + From eaec38677076056df763cf0629d38225fc167165 Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 14 Nov 2025 16:12:27 +0200 Subject: [PATCH 283/288] fix(curation): fix method signature of upserting curation sets --- src/typesense/curation_set.py | 20 +++++++++++++++----- src/typesense/curation_sets.py | 14 -------------- tests/curation_sets_test.py | 5 ++--- 3 files changed, 17 insertions(+), 22 deletions(-) diff --git a/src/typesense/curation_set.py b/src/typesense/curation_set.py index 3828161..7cf53f5 100644 --- a/src/typesense/curation_set.py +++ b/src/typesense/curation_set.py @@ -9,11 +9,12 @@ from typesense.api_call import ApiCall from typesense.types.curation_set import ( - CurationSetSchema, + CurationItemDeleteSchema, + CurationItemSchema, CurationSetDeleteSchema, CurationSetListItemResponseSchema, - CurationItemSchema, - CurationItemDeleteSchema, + CurationSetSchema, + CurationSetUpsertSchema, ) @@ -43,6 +44,17 @@ def delete(self) -> 
CurationSetDeleteSchema: ) return response + def upsert( + self, + payload: CurationSetUpsertSchema, + ) -> CurationSetSchema: + response: CurationSetSchema = self.api_call.put( + "/".join([self._endpoint_path]), + body=payload, + entity_type=CurationSetSchema, + ) + return response + # Items sub-resource @property def _items_path(self) -> str: @@ -92,5 +104,3 @@ def delete_item(self, item_id: str) -> CurationItemDeleteSchema: entity_type=CurationItemDeleteSchema, ) return response - - diff --git a/src/typesense/curation_sets.py b/src/typesense/curation_sets.py index b13303e..4a30abc 100644 --- a/src/typesense/curation_sets.py +++ b/src/typesense/curation_sets.py @@ -10,9 +10,7 @@ from typesense.api_call import ApiCall from typesense.curation_set import CurationSet from typesense.types.curation_set import ( - CurationSetSchema, CurationSetsListResponseSchema, - CurationSetUpsertSchema, ) @@ -34,15 +32,3 @@ def __getitem__(self, curation_set_name: str) -> CurationSet: from typesense.curation_set import CurationSet as PerSet return PerSet(self.api_call, curation_set_name) - - def upsert( - self, - curation_set_name: str, - payload: CurationSetUpsertSchema, - ) -> CurationSetSchema: - response: CurationSetSchema = self.api_call.put( - "/".join([CurationSets.resource_path, curation_set_name]), - body=payload, - entity_type=CurationSetSchema, - ) - return response diff --git a/tests/curation_sets_test.py b/tests/curation_sets_test.py index 82091d5..88c70bf 100644 --- a/tests/curation_sets_test.py +++ b/tests/curation_sets_test.py @@ -96,7 +96,7 @@ def test_upsert(fake_curation_sets: CurationSets) -> None: } ] } - response = fake_curation_sets.upsert("products", payload) + response = fake_curation_sets["products"].upsert(payload) assert response == json_response assert mock.call_count == 1 @@ -111,8 +111,7 @@ def test_actual_upsert( delete_all_curation_sets: None, ) -> None: """Test that the CurationSets object can upsert a curation set on Typesense Server.""" - response 
= actual_curation_sets.upsert( - "products", + response = actual_curation_sets["products"].upsert( { "items": [ { From eb147fc688c59b7f68d813d1bf8855e56b4b9d8c Mon Sep 17 00:00:00 2001 From: Fanis Tharropoulos Date: Fri, 14 Nov 2025 16:12:42 +0200 Subject: [PATCH 284/288] fix(synonyms): fix method signature of upserting synonym sets --- src/typesense/synonym_set.py | 21 +++++++++++++-------- src/typesense/synonym_sets.py | 13 ------------- tests/synonym_sets_test.py | 6 ++---- 3 files changed, 15 insertions(+), 25 deletions(-) diff --git a/src/typesense/synonym_set.py b/src/typesense/synonym_set.py index 0828791..e9eaae3 100644 --- a/src/typesense/synonym_set.py +++ b/src/typesense/synonym_set.py @@ -9,10 +9,11 @@ from typesense.api_call import ApiCall from typesense.types.synonym_set import ( + SynonymItemDeleteSchema, + SynonymItemSchema, + SynonymSetCreateSchema, SynonymSetDeleteSchema, SynonymSetRetrieveSchema, - SynonymItemSchema, - SynonymItemDeleteSchema, ) @@ -35,13 +36,21 @@ def retrieve(self) -> SynonymSetRetrieveSchema: ) return response + def upsert(self, set: SynonymSetCreateSchema) -> SynonymSetCreateSchema: + response: SynonymSetCreateSchema = self.api_call.put( + self._endpoint_path, + entity_type=SynonymSetCreateSchema, + body=set, + ) + return response + def delete(self) -> SynonymSetDeleteSchema: response: SynonymSetDeleteSchema = self.api_call.delete( self._endpoint_path, entity_type=SynonymSetDeleteSchema, ) return response - + @property def _items_path(self) -> str: return "/".join([self._endpoint_path, "items"]) # /synonym_sets/{name}/items @@ -57,9 +66,7 @@ def list_items( "offset": offset, } clean_params: typing.Dict[str, int] = { - k: v - for k, v in params.items() - if v is not None + k: v for k, v in params.items() if v is not None } response: typing.List[SynonymItemSchema] = self.api_call.get( self._items_path, @@ -91,5 +98,3 @@ def delete_item(self, item_id: str) -> SynonymItemDeleteSchema: "/".join([self._items_path, item_id]), 
entity_type=SynonymItemDeleteSchema ) return response - - diff --git a/src/typesense/synonym_sets.py b/src/typesense/synonym_sets.py index 543e77c..ee4587f 100644 --- a/src/typesense/synonym_sets.py +++ b/src/typesense/synonym_sets.py @@ -10,7 +10,6 @@ from typesense.api_call import ApiCall from typesense.synonym_set import SynonymSet from typesense.types.synonym_set import ( - SynonymSetCreateSchema, SynonymSetSchema, ) @@ -33,15 +32,3 @@ def __getitem__(self, synonym_set_name: str) -> SynonymSet: from typesense.synonym_set import SynonymSet as PerSet return PerSet(self.api_call, synonym_set_name) - - def upsert( - self, - synonym_set_name: str, - payload: SynonymSetCreateSchema, - ) -> SynonymSetSchema: - response: SynonymSetSchema = self.api_call.put( - "/".join([SynonymSets.resource_path, synonym_set_name]), - body=payload, - entity_type=SynonymSetSchema, - ) - return response diff --git a/tests/synonym_sets_test.py b/tests/synonym_sets_test.py index fd0e532..f63c196 100644 --- a/tests/synonym_sets_test.py +++ b/tests/synonym_sets_test.py @@ -19,7 +19,6 @@ SynonymSetSchema, ) - pytestmark = pytest.mark.skipif( not is_v30_or_above( Client( @@ -102,7 +101,7 @@ def test_create(fake_synonym_sets: SynonymSets) -> None: } ] } - fake_synonym_sets.upsert("test-set", payload) + fake_synonym_sets["test-set"].upsert(payload) assert mock.call_count == 1 assert mock.called is True @@ -116,8 +115,7 @@ def test_actual_create( delete_all_synonym_sets: None, ) -> None: """Test that the SynonymSets object can create a synonym set on Typesense Server.""" - response = actual_synonym_sets.upsert( - "test-set", + response = actual_synonym_sets["test-set"].upsert( { "items": [ { From bd8a43dfe5aff297af33672da60998785e2e6e00 Mon Sep 17 00:00:00 2001 From: Kishore Nallan Date: Fri, 14 Nov 2025 20:27:38 +0400 Subject: [PATCH 285/288] Bump version --- src/typesense/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/typesense/__init__.py 
b/src/typesense/__init__.py index 5f7f548..147e6a8 100644 --- a/src/typesense/__init__.py +++ b/src/typesense/__init__.py @@ -1,4 +1,4 @@ from .client import Client # NOQA -__version__ = "1.2.0" +__version__ = "1.3.0" From 737f571e8c476e0942418ca53c72658c7ca9d323 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=9Clgen=20Sar=C4=B1kavak?= Date: Sun, 23 Nov 2025 22:36:21 +0300 Subject: [PATCH 286/288] Specify supported Python versions via trove classifiers --- pyproject.toml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 56fb095..59537c5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,6 +15,10 @@ classifiers = [ "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", ] dependencies = ["requests", "typing-extensions"] dynamic = ["version"] From 9a1d54124f13b955215441c33a53cf8167d89798 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=9Clgen=20Sar=C4=B1kavak?= Date: Mon, 24 Nov 2025 18:47:00 +0300 Subject: [PATCH 287/288] Enable ruff formatter in CI --- .github/workflows/test-and-lint.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/test-and-lint.yml b/.github/workflows/test-and-lint.yml index 9552400..75b6b9c 100644 --- a/.github/workflows/test-and-lint.yml +++ b/.github/workflows/test-and-lint.yml @@ -47,6 +47,7 @@ jobs: - name: Lint with Ruff run: | uv run ruff check src/typesense + uv run ruff format src/typesense - name: Check types with mypy run: | From e77447891903f0a6e8841e62a5efba434b628128 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=9Clgen=20Sar=C4=B1kavak?= Date: Mon, 24 Nov 2025 18:47:45 +0300 Subject: [PATCH 288/288] Apply "ruff format" fixes --- README.md | 3 --- src/typesense/analytics_events.py | 2 -- src/typesense/analytics_v1.py | 2 -- 
src/typesense/collections.py | 13 ++++++------- src/typesense/configuration.py | 4 +++- src/typesense/overrides.py | 4 ++-- src/typesense/synonyms.py | 4 ++-- src/typesense/types/analytics_rule_v1.py | 2 -- src/typesense/types/collection.py | 1 - src/typesense/types/curation_set.py | 2 -- src/typesense/types/synonym_set.py | 4 +++- 11 files changed, 16 insertions(+), 25 deletions(-) diff --git a/README.md b/README.md index bbe9f08..208a7a5 100644 --- a/README.md +++ b/README.md @@ -37,6 +37,3 @@ Bug reports and pull requests are welcome on GitHub at [https://github.com/types ## License `typesense-python` is distributed under the Apache 2 license. - - - diff --git a/src/typesense/analytics_events.py b/src/typesense/analytics_events.py index c462e6c..651591d 100644 --- a/src/typesense/analytics_events.py +++ b/src/typesense/analytics_events.py @@ -69,5 +69,3 @@ def status(self) -> AnalyticsStatus: entity_type=AnalyticsStatus, ) return response - - diff --git a/src/typesense/analytics_v1.py b/src/typesense/analytics_v1.py index 657af6c..baa18a7 100644 --- a/src/typesense/analytics_v1.py +++ b/src/typesense/analytics_v1.py @@ -47,5 +47,3 @@ def __init__(self, api_call: ApiCall) -> None: @property def rules(self) -> AnalyticsRulesV1: return self._rules - - diff --git a/src/typesense/collections.py b/src/typesense/collections.py index 72fa381..dd9fe53 100644 --- a/src/typesense/collections.py +++ b/src/typesense/collections.py @@ -72,7 +72,6 @@ def __contains__(self, collection_name: str) -> bool: """ if collection_name in self.collections: try: # noqa: WPS229, WPS529 - self.collections[collection_name].retrieve() # noqa: WPS529 return True except Exception: @@ -100,7 +99,7 @@ def __getitem__(self, collection_name: str) -> Collection[TDoc]: Example: >>> collections = Collections(api_call) - >>> fruits_collection = collections['fruits'] + >>> fruits_collection = collections["fruits"] """ if not 
self.collections.get(collection_name): self.collections[collection_name] = Collection( @@ -126,11 +125,11 @@ def create(self, schema: CollectionCreateSchema) -> CollectionSchema: >>> schema = { ... "name": "companies", ... "fields": [ - ... {"name": "company_name", "type": "string" }, - ... {"name": "num_employees", "type": "int32" }, - ... {"name": "country", "type": "string", "facet": True } + ... {"name": "company_name", "type": "string"}, + ... {"name": "num_employees", "type": "int32"}, + ... {"name": "country", "type": "string", "facet": True}, ... ], - ... "default_sorting_field": "num_employees" + ... "default_sorting_field": "num_employees", ... } >>> created_schema = collections.create(schema) """ @@ -154,7 +153,7 @@ def retrieve(self) -> typing.List[CollectionSchema]: >>> collections = Collections(api_call) >>> all_collections = collections.retrieve() >>> for collection in all_collections: - ... print(collection['name']) + ... print(collection["name"]) """ call: typing.List[CollectionSchema] = self.api_call.get( endpoint=Collections.resource_path, diff --git a/src/typesense/configuration.py b/src/typesense/configuration.py index d82408d..1720233 100644 --- a/src/typesense/configuration.py +++ b/src/typesense/configuration.py @@ -223,7 +223,9 @@ def __init__( ) self.verify = config_dict.get("verify", True) self.additional_headers = config_dict.get("additional_headers", {}) - self.suppress_deprecation_warnings = config_dict.get("suppress_deprecation_warnings", False) + self.suppress_deprecation_warnings = config_dict.get( + "suppress_deprecation_warnings", False + ) def _handle_nearest_node( self, diff --git a/src/typesense/overrides.py b/src/typesense/overrides.py index 4f8bc80..8581e93 100644 --- a/src/typesense/overrides.py +++ b/src/typesense/overrides.py @@ -63,7 +63,7 @@ class Overrides: resource_path: typing.Final[str] = "overrides" - @warn_deprecation( # type: ignore[misc] + @warn_deprecation( # type: ignore[misc] "Overrides is deprecated on v30+. 
Use client.curation_sets instead.", flag_name="overrides_deprecation", ) @@ -71,7 +71,7 @@ def __init__( self, api_call: ApiCall, collection_name: str, - ) -> None: + ) -> None: """ Initialize the Overrides object. diff --git a/src/typesense/synonyms.py b/src/typesense/synonyms.py index 3a5622f..fe5f508 100644 --- a/src/typesense/synonyms.py +++ b/src/typesense/synonyms.py @@ -61,12 +61,12 @@ class Synonyms: resource_path: typing.Final[str] = "synonyms" - @warn_deprecation( # type: ignore[misc] + @warn_deprecation( # type: ignore[misc] "The synonyms API (collections/{collection}/synonyms) is deprecated is removed on v30+. " "Use synonym sets (synonym_sets) instead.", flag_name="synonyms_deprecation", ) - def __init__(self, api_call: ApiCall, collection_name: str) -> None: + def __init__(self, api_call: ApiCall, collection_name: str) -> None: """ Initialize the Synonyms object. diff --git a/src/typesense/types/analytics_rule_v1.py b/src/typesense/types/analytics_rule_v1.py index 3f76046..88ffd00 100644 --- a/src/typesense/types/analytics_rule_v1.py +++ b/src/typesense/types/analytics_rule_v1.py @@ -201,5 +201,3 @@ class RulesRetrieveSchema(typing.TypedDict): """ rules: typing.List[typing.Union[RuleSchemaForQueries, RuleSchemaForCounters]] - - diff --git a/src/typesense/types/collection.py b/src/typesense/types/collection.py index 702fb41..e49fbc0 100644 --- a/src/typesense/types/collection.py +++ b/src/typesense/types/collection.py @@ -236,4 +236,3 @@ class CollectionUpdateSchema(typing.TypedDict): ] synonym_sets: typing.NotRequired[typing.List[str]] curation_sets: typing.NotRequired[typing.List[str]] - diff --git a/src/typesense/types/curation_set.py b/src/typesense/types/curation_set.py index a19ee0f..6468166 100644 --- a/src/typesense/types/curation_set.py +++ b/src/typesense/types/curation_set.py @@ -126,5 +126,3 @@ class CurationSetDeleteSchema(typing.TypedDict): """Response schema for deleting a curation set.""" name: str - - diff --git 
a/src/typesense/types/synonym_set.py b/src/typesense/types/synonym_set.py index 9d0dfe1..d036411 100644 --- a/src/typesense/types/synonym_set.py +++ b/src/typesense/types/synonym_set.py @@ -29,6 +29,7 @@ class SynonymItemSchema(typing.TypedDict): locale: typing.NotRequired[Locales] symbols_to_index: typing.NotRequired[typing.List[str]] + class SynonymItemDeleteSchema(typing.TypedDict): """ Schema for deleting a synonym item. @@ -36,6 +37,7 @@ class SynonymItemDeleteSchema(typing.TypedDict): id: str + class SynonymSetCreateSchema(typing.TypedDict): """ Schema for creating or updating a synonym set. @@ -73,4 +75,4 @@ class SynonymSetDeleteSchema(typing.TypedDict): name (str): Name of the deleted synonym set. """ - name: str \ No newline at end of file + name: str