From a7ff90da433259e68b61fe3d89d02e208a687a58 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Thu, 16 Jan 2025 13:32:43 +0200 Subject: [PATCH 01/33] The method does not raise CursorStateError (#357) --- arango/cursor.py | 1 - 1 file changed, 1 deletion(-) diff --git a/arango/cursor.py b/arango/cursor.py index 40fc59fe..7c9fabe2 100644 --- a/arango/cursor.py +++ b/arango/cursor.py @@ -332,7 +332,6 @@ def close(self, ignore_missing: bool = False) -> Optional[bool]: smaller than the batch size). :rtype: bool | None :raise arango.exceptions.CursorCloseError: If operation fails. - :raise arango.exceptions.CursorStateError: If cursor ID is not set. """ if self._id is None: return None From 8df2fee0a151a26025750d2756202e65727c5c43 Mon Sep 17 00:00:00 2001 From: Deniz Alpaslan Date: Fri, 31 Jan 2025 13:56:35 +0300 Subject: [PATCH 02/33] add support for sort in collection.find function (#359) * add support for sort in collection.find function * update sort parameter type. add SortValidationError as custom exception * Update arango/collection.py Co-authored-by: Alex Petenchea * Update arango/collection.py Co-authored-by: Alex Petenchea * update utils.py and collection.py to raise SortValidationError * update utils.py for build_sort_expression to accept Jsons or None * Update arango/collection.py --------- Co-authored-by: Deniz Alpaslan Co-authored-by: Alex Petenchea --- .gitignore | 3 +++ arango/collection.py | 10 +++++++++- arango/exceptions.py | 7 +++++++ arango/utils.py | 43 ++++++++++++++++++++++++++++++++++++++++-- docs/document.rst | 6 ++++++ tests/test_document.py | 20 ++++++++++++++++++++ 6 files changed, 86 insertions(+), 3 deletions(-) diff --git a/.gitignore b/.gitignore index c6ef2445..4fa6f46d 100644 --- a/.gitignore +++ b/.gitignore @@ -124,3 +124,6 @@ arango/version.py # test results *_results.txt + +# devcontainers +.devcontainer diff --git a/arango/collection.py b/arango/collection.py index 446200fb..e2dfcd2a 100644 --- a/arango/collection.py +++ 
b/arango/collection.py @@ -50,11 +50,13 @@ from arango.typings import Fields, Headers, Json, Jsons, Params from arango.utils import ( build_filter_conditions, + build_sort_expression, get_batches, get_doc_id, is_none_or_bool, is_none_or_int, is_none_or_str, + validate_sort_parameters, ) @@ -753,6 +755,7 @@ def find( skip: Optional[int] = None, limit: Optional[int] = None, allow_dirty_read: bool = False, + sort: Optional[Jsons] = None, ) -> Result[Cursor]: """Return all documents that match the given filters. @@ -764,13 +767,18 @@ def find( :type limit: int | None :param allow_dirty_read: Allow reads from followers in a cluster. :type allow_dirty_read: bool + :param sort: Document sort parameters + :type sort: Jsons | None :return: Document cursor. :rtype: arango.cursor.Cursor :raise arango.exceptions.DocumentGetError: If retrieval fails. + :raise arango.exceptions.SortValidationError: If sort parameters are invalid. """ assert isinstance(filters, dict), "filters must be a dict" assert is_none_or_int(skip), "skip must be a non-negative int" assert is_none_or_int(limit), "limit must be a non-negative int" + if sort: + validate_sort_parameters(sort) skip_val = skip if skip is not None else 0 limit_val = limit if limit is not None else "null" @@ -778,9 +786,9 @@ def find( FOR doc IN @@collection {build_filter_conditions(filters)} LIMIT {skip_val}, {limit_val} + {build_sort_expression(sort)} RETURN doc """ - bind_vars = {"@collection": self.name} request = Request( diff --git a/arango/exceptions.py b/arango/exceptions.py index 28295b2b..29bcdc17 100644 --- a/arango/exceptions.py +++ b/arango/exceptions.py @@ -1074,3 +1074,10 @@ class JWTRefreshError(ArangoClientError): class JWTExpiredError(ArangoClientError): """JWT token has expired.""" + + +################################### +# Parameter Validation Exceptions # +################################### +class SortValidationError(ArangoClientError): + """Invalid sort parameters.""" diff --git a/arango/utils.py 
b/arango/utils.py index 541f9d0c..0d128db3 100644 --- a/arango/utils.py +++ b/arango/utils.py @@ -11,8 +11,8 @@ from contextlib import contextmanager from typing import Any, Iterator, Sequence, Union -from arango.exceptions import DocumentParseError -from arango.typings import Json +from arango.exceptions import DocumentParseError, SortValidationError +from arango.typings import Json, Jsons @contextmanager @@ -126,3 +126,42 @@ def build_filter_conditions(filters: Json) -> str: conditions.append(f"doc.{field} == {json.dumps(v)}") return "FILTER " + " AND ".join(conditions) + + +def validate_sort_parameters(sort: Sequence[Json]) -> bool: + """Validate sort parameters for an AQL query. + + :param sort: Document sort parameters. + :type sort: Sequence[Json] + :return: Validation success. + :rtype: bool + :raise arango.exceptions.SortValidationError: If sort parameters are invalid. + """ + assert isinstance(sort, Sequence) + for param in sort: + if "sort_by" not in param or "sort_order" not in param: + raise SortValidationError( + "Each sort parameter must have 'sort_by' and 'sort_order'." + ) + if param["sort_order"].upper() not in ["ASC", "DESC"]: + raise SortValidationError("'sort_order' must be either 'ASC' or 'DESC'") + return True + + +def build_sort_expression(sort: Jsons | None) -> str: + """Build a sort condition for an AQL query. + + :param sort: Document sort parameters. + :type sort: Jsons | None + :return: The complete AQL sort condition. 
+ :rtype: str + """ + if not sort: + return "" + + sort_chunks = [] + for sort_param in sort: + chunk = f"doc.{sort_param['sort_by']} {sort_param['sort_order']}" + sort_chunks.append(chunk) + + return "SORT " + ", ".join(sort_chunks) diff --git a/docs/document.rst b/docs/document.rst index 62ad0886..0f0d7d10 100644 --- a/docs/document.rst +++ b/docs/document.rst @@ -103,6 +103,12 @@ Standard documents are managed via collection API wrapper: assert student['GPA'] == 3.6 assert student['last'] == 'Kim' + # Retrieve one or more matching documents, sorted by a field. + for student in students.find({'first': 'John'}, sort=[{'sort_by': 'GPA', 'sort_order': 'DESC'}]): + assert student['_key'] == 'john' + assert student['GPA'] == 3.6 + assert student['last'] == 'Kim' + # Retrieve a document by key. students.get('john') diff --git a/tests/test_document.py b/tests/test_document.py index 37599507..7cb0a435 100644 --- a/tests/test_document.py +++ b/tests/test_document.py @@ -1162,6 +1162,26 @@ def test_document_find(col, bad_col, docs): # Set up test documents col.import_bulk(docs) + # Test find with sort expression (single field) + found = list(col.find({}, sort=[{"sort_by": "text", "sort_order": "ASC"}])) + assert len(found) == 6 + assert found[0]["text"] == "bar" + assert found[-1]["text"] == "foo" + + # Test find with sort expression (multiple fields) + found = list( + col.find( + {}, + sort=[ + {"sort_by": "text", "sort_order": "ASC"}, + {"sort_by": "val", "sort_order": "DESC"}, + ], + ) + ) + assert len(found) == 6 + assert found[0]["val"] == 6 + assert found[-1]["val"] == 1 + # Test find (single match) with default options found = list(col.find({"val": 2})) assert len(found) == 1 From a9e278e59cf4e940b7e2180fb23e3fb60b4661c9 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Fri, 7 Feb 2025 23:29:55 +0200 Subject: [PATCH 03/33] Updating fragile test, so the next DB upgrade won't make it fail. 
(#360) --- tests/test_cursor.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/test_cursor.py b/tests/test_cursor.py index 80c8df28..184d7ed8 100644 --- a/tests/test_cursor.py +++ b/tests/test_cursor.py @@ -50,7 +50,7 @@ def test_cursor_from_execute_query(db, col, docs): assert profile["parsing"] > 0 plan = cursor.plan() - assert set(plan.keys()) == { + expected_keys = { "nodes", "rules", "collections", @@ -59,6 +59,8 @@ def test_cursor_from_execute_query(db, col, docs): "estimatedNrItems", "isModificationQuery", } + for key in expected_keys: + assert key in plan assert clean_doc(cursor.next()) == docs[0] assert cursor.id == cursor_id From 392390e45d053c6e9cf6bb7f2b5f19f38826b7db Mon Sep 17 00:00:00 2001 From: Anthony Mahanna <43019056+aMahanna@users.noreply.github.com> Date: Mon, 24 Feb 2025 10:13:34 -0500 Subject: [PATCH 04/33] attempt: 3.12 CI (#362) --- .circleci/config.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 138cfbc6..6c7e8ae6 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -11,8 +11,7 @@ workflows: name: Python (<< matrix.python_version >>) - ArangoDB (<< matrix.arangodb_license >>, << matrix.arangodb_version >> << matrix.arangodb_config >>) matrix: parameters: - # TODO: Revisit why pyenv doesn't recognize 3.12 - python_version: ["3.8", "3.9", "3.10", "3.11"] # "3.12" + python_version: ["3.9", "3.10", "3.11", "3.12"] arangodb_config: ["single", "cluster"] arangodb_license: ["community", "enterprise"] arangodb_version: ["3.11", "latest"] From b26e3b047bdf0acd4706038425c8261960a5d202 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Mon, 24 Feb 2025 22:06:53 +0530 Subject: [PATCH 05/33] 3.12.4 updates (#361) * Adding usePlanCache option for AQL queries. 
* Fixing Python 3.9 error * Dropping Python 3.8 support * Preparing new release * Adding errors script * Updating errors script * Updating docs * Removing errors script --- README.md | 2 +- arango/aql.py | 39 +- arango/errno.py | 1013 +++++++++++++++++++-------------------- arango/request.py | 2 +- arango/utils.py | 4 +- docs/conf.py | 2 +- docs/index.rst | 2 +- pyproject.toml | 3 +- setup.cfg | 2 +- tests/test_aql.py | 82 ++++ 10 files changed, 548 insertions(+), 603 deletions(-) diff --git a/README.md b/README.md index 88d7f007..ad5dee47 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,7 @@ database natively supporting documents, graphs and search. ## Requirements - ArangoDB version 3.11+ -- Python version 3.8+ +- Python version 3.9+ ## Installation diff --git a/arango/aql.py b/arango/aql.py index 8552d0b2..25786302 100644 --- a/arango/aql.py +++ b/arango/aql.py @@ -144,6 +144,36 @@ def response_handler(resp: Response) -> bool: return self._execute(request, response_handler) + def plan_entries(self) -> Result[Jsons]: + """Return a list of all AQL query plan cache entries. + + :return: List of AQL query plan cache entries. + :rtype: list + :raise arango.exceptions.AQLCacheEntriesError: If retrieval fails. + """ + request = Request(method="get", endpoint="/_api/query-plan-cache") + + def response_handler(resp: Response) -> Jsons: + if not resp.is_success: + raise AQLCacheEntriesError(resp, request) + result: Jsons = resp.body + return result + + return self._execute(request, response_handler) + + def clear_plan(self) -> Result[None]: + """Clear the AQL query plan cache. + + :raise arango.exceptions.AQLCacheClearError: If clearing the cache fails.
+ """ + request = Request(method="delete", endpoint="/_api/query-plan-cache") + + def response_handler(resp: Response) -> None: + if not resp.is_success: + raise AQLCacheClearError(resp, request) + + return self._execute(request, response_handler) + class AQL(ApiGroup): """AQL (ArangoDB Query Language) API wrapper. @@ -277,6 +307,7 @@ def execute( allow_dirty_read: bool = False, allow_retry: bool = False, force_one_shard_attribute_value: Optional[str] = None, + use_plan_cache: Optional[bool] = None, ) -> Result[Cursor]: """Execute the query and return the result cursor. @@ -388,6 +419,8 @@ def execute( shipped to a wrong DB server and may not return results (i.e. empty result set). Use at your own risk. :param force_one_shard_attribute_value: str | None + :param use_plan_cache: If set to True, the query plan cache is used. + :param use_plan_cache: bool | None :return: Result cursor. :rtype: arango.cursor.Cursor :raise arango.exceptions.AQLQueryExecuteError: If execute fails. @@ -399,8 +432,6 @@ def execute( data["ttl"] = ttl if bind_vars is not None: data["bindVars"] = bind_vars - if cache is not None: - data["cache"] = cache if memory_limit is not None: data["memoryLimit"] = memory_limit @@ -437,6 +468,10 @@ def execute( options["allowRetry"] = allow_retry if force_one_shard_attribute_value is not None: options["forceOneShardAttributeValue"] = force_one_shard_attribute_value + if cache is not None: + options["cache"] = cache + if use_plan_cache is not None: + options["usePlanCache"] = use_plan_cache if options: data["options"] = options diff --git a/arango/errno.py b/arango/errno.py index bc42bcab..1f48b216 100644 --- a/arango/errno.py +++ b/arango/errno.py @@ -1,1339 +1,1168 @@ ################## -# General Errors # +# General errors # ################## -# No error occurred. +# no error NO_ERROR = 0 -# General error occurred. +# failed FAILED = 1 -# Operating system error occurred. +# system error SYS_ERROR = 2 -# Out of memory. 
+# out of memory OUT_OF_MEMORY = 3 -# Internal error occurred. +# internal error INTERNAL = 4 -# Illegal number representation given. +# illegal number ILLEGAL_NUMBER = 5 -# Numeric overflow occurred. +# numeric overflow NUMERIC_OVERFLOW = 6 -# Unknown option supplied by user. +# illegal option ILLEGAL_OPTION = 7 -# Detected PID without living process. +# dead process identifier DEAD_PID = 8 -# Feature not implemented. +# not implemented NOT_IMPLEMENTED = 9 -# Bad parameter. +# bad parameter BAD_PARAMETER = 10 -# Missing permission. +# forbidden FORBIDDEN = 11 -# Out of memory (mmap). -OUT_OF_MEMORY_MMAP = 12 - -# Corrupt CSV line. +# csv is corrupt CORRUPTED_CSV = 13 -# File not found. +# file not found FILE_NOT_FOUND = 14 -# Cannot write to file. +# cannot write file CANNOT_WRITE_FILE = 15 -# Cannot overwrite file. +# cannot overwrite file CANNOT_OVERWRITE_FILE = 16 -# Type error occurred. +# type error TYPE_ERROR = 17 -# Timed out waiting for a lock. +# lock timeout LOCK_TIMEOUT = 18 -# Cannot create a directory. +# cannot create directory CANNOT_CREATE_DIRECTORY = 19 -# Cannot create a temporary file. +# cannot create temporary file CANNOT_CREATE_TEMP_FILE = 20 -# Request cancelled by user. +# canceled request REQUEST_CANCELED = 21 -# Raised for debugging. +# intentional debug error DEBUG = 22 -# Invalid IP address. +# IP address is invalid IP_ADDRESS_INVALID = 25 -# File exists already. +# file exists FILE_EXISTS = 27 -# Locked resource or operation. +# locked LOCKED = 28 -# Deadlock detected when accessing collections. +# deadlock detected DEADLOCK = 29 -# Call failed as server shutdown is in progress. +# shutdown in progress SHUTTING_DOWN = 30 -# Feature only for enterprise version of ArangoDB. +# only enterprise version ONLY_ENTERPRISE = 31 -# Resource usage exceeded maximum value. +# resource limit exceeded RESOURCE_LIMIT = 32 -# ICU operation failed. +# icu error: %s ICU_ERROR = 33 -# Cannot read a file. 
+# cannot read file CANNOT_READ_FILE = 34 -# Incompatible version of ArangoDB. +# incompatible server version INCOMPATIBLE_VERSION = 35 -# Requested resource disabled. +# disabled DISABLED = 36 -# JSON string could not be parsed. +# malformed json MALFORMED_JSON = 37 -# Call cannot succeed because the server startup phase is still in progress. +# startup ongoing STARTING_UP = 38 +# error during deserialization +DESERIALIZE = 39 + +# reached end of file +END_OF_FILE = 40 + ########################### -# HTTP Error Status Codes # +# HTTP error status codes # ########################### -# Bad HTTP parameter. +# bad parameter HTTP_BAD_PARAMETER = 400 -# User unauthorized. +# unauthorized HTTP_UNAUTHORIZED = 401 -# Operation forbidden. +# forbidden HTTP_FORBIDDEN = 403 -# Unknown URI. +# not found HTTP_NOT_FOUND = 404 -# HTTP method unknown. +# method not supported HTTP_METHOD_NOT_ALLOWED = 405 -# HTTP content type not supported. +# request not acceptable HTTP_NOT_ACCEPTABLE = 406 -# Timeout occurred. +# request timeout HTTP_REQUEST_TIMEOUT = 408 -# Conflict occurred in an HTTP operation. +# conflict HTTP_CONFLICT = 409 -# Requested content has been permanently deleted. +# content permanently deleted HTTP_GONE = 410 -# Precondition not met. +# precondition failed HTTP_PRECONDITION_FAILED = 412 -# Internal server error occurred. +# enhance your calm +HTTP_ENHANCE_YOUR_CALM = 420 + +# internal server error HTTP_SERVER_ERROR = 500 -# API is not implemented. +# not implemented HTTP_NOT_IMPLEMENTED = 501 -# Service temporarily unavailable. +# service unavailable HTTP_SERVICE_UNAVAILABLE = 503 -# Service contacted by ArangoDB did not respond in time. +# gateway timeout HTTP_GATEWAY_TIMEOUT = 504 ########################## -# HTTP Processing Errors # +# HTTP processing errors # ########################## -# Corrupted JSON string. +# invalid JSON object HTTP_CORRUPTED_JSON = 600 -# URL contains superfluous suffices. 
+# superfluous URL suffices HTTP_SUPERFLUOUS_SUFFICES = 601 #################################### -# Internal ArangoDB Storage Errors # +# Internal ArangoDB storage errors # #################################### -# Datafile in illegal state. +# illegal state ILLEGAL_STATE = 1000 -# User attempted to write to a sealed datafile. -DATAFILE_SEALED = 1002 - -# Read-only datafile or collection. +# read only READ_ONLY = 1004 -# Duplicate identifier detected. +# duplicate identifier DUPLICATE_IDENTIFIER = 1005 -# Datafile unreadable. -DATAFILE_UNREADABLE = 1006 - -# Datafile empty. -DATAFILE_EMPTY = 1007 - -# Error occurred during WAL log file recovery. -RECOVERY = 1008 - -# Required datafile statistics object not found. -DATAFILE_STATISTICS_NOT_FOUND = 1009 - #################################### -# External ArangoDB Storage Errors # +# External ArangoDB storage errors # #################################### -# Datafile corrupted. +# corrupted datafile CORRUPTED_DATAFILE = 1100 -# Parameter file corrupted or cannot be read. +# illegal or unreadable parameter file ILLEGAL_PARAMETER_FILE = 1101 -# Collection contains one or more corrupted datafiles. +# corrupted collection CORRUPTED_COLLECTION = 1102 -# System call mmap failed. -MMAP_FAILED = 1103 - -# Filesystem full. +# filesystem full FILESYSTEM_FULL = 1104 -# Cannot create journal. -NO_JOURNAL = 1105 - -# Datafile of the same name already exists. -DATAFILE_ALREADY_EXISTS = 1106 - -# Database directory locked by another process. +# database directory is locked DATADIR_LOCKED = 1107 -# Directory of the same name already exists. -COLLECTION_DIRECTORY_ALREADY_EXISTS = 1108 - -# System call msync failed. -MSYNC_FAILED = 1109 - -# Cannot lock the database directory on startup. -DATADIR_UNLOCKABLE = 1110 - -# Server waited too long for the datafile to be synced to disk. 
-SYNC_TIMEOUT = 1111 - ################################### -# General ArangoDB Storage Errors # +# General ArangoDB storage errors # ################################### -# Conflict detected while updating or deleting a document. +# conflict CONFLICT = 1200 -# Database directory invalid. -DATADIR_INVALID = 1201 - -# Unknown document identifier or handle. +# document not found DOCUMENT_NOT_FOUND = 1202 -# Collection with given identifier or name unknown. +# collection or view not found DATA_SOURCE_NOT_FOUND = 1203 -# Missing collection parameter. +# parameter 'collection' not found COLLECTION_PARAMETER_MISSING = 1204 -# Invalid document handle. +# illegal document identifier DOCUMENT_HANDLE_BAD = 1205 -# Maximal journal size too small. -MAXIMAL_SIZE_TOO_SMALL = 1206 - -# Duplicate name detected. +# duplicate name DUPLICATE_NAME = 1207 -# Illegal name detected. +# illegal name ILLEGAL_NAME = 1208 -# No suitable index for query. +# no suitable index known NO_INDEX = 1209 -# Unique constraint violation. +# unique constraint violated UNIQUE_CONSTRAINT_VIOLATED = 1210 -# Index with unknown identifier. +# index not found INDEX_NOT_FOUND = 1212 -# Cross-collection requested. +# cross collection request not allowed CROSS_COLLECTION_REQUEST = 1213 -# Index handle corrupted. +# illegal index identifier INDEX_HANDLE_BAD = 1214 -# Document too large to fit into any datafile. +# document too large DOCUMENT_TOO_LARGE = 1216 -# Collection must be unloaded. -COLLECTION_NOT_UNLOADED = 1217 - -# Invalid collection type. +# collection type invalid COLLECTION_TYPE_INVALID = 1218 -# Failed to parse an attribute name definition. +# parsing attribute name definition failed ATTRIBUTE_PARSER_FAILED = 1220 -# Corrupted document key. +# illegal document key DOCUMENT_KEY_BAD = 1221 -# User-defined document key supplied for collections with auto key generation. +# unexpected document key DOCUMENT_KEY_UNEXPECTED = 1222 -# Database directory not writable for current user. 
+# server database directory not writable DATADIR_NOT_WRITABLE = 1224 -# Key generator out of keys. +# out of keys OUT_OF_KEYS = 1225 -# Document key missing. +# missing document key DOCUMENT_KEY_MISSING = 1226 -# There was an attempt to create a document of invalid type. +# invalid document type DOCUMENT_TYPE_INVALID = 1227 -# Non-existing database accessed. +# database not found DATABASE_NOT_FOUND = 1228 -# Invalid database used. +# database name invalid DATABASE_NAME_INVALID = 1229 -# Operation requested in non-system database. +# operation only allowed in system database USE_SYSTEM_DATABASE = 1230 -# Invalid key generator. +# invalid key generator INVALID_KEY_GENERATOR = 1232 -# Undefined or invalid "_from" or "_to" values in an edge document. +# expecting both `_from` and `_to` attributes to be defined in the edge document and have the format `/` INVALID_EDGE_ATTRIBUTE = 1233 -# Cannot create index. +# index creation failed INDEX_CREATION_FAILED = 1235 -# Server is write-throttled and a write operation waited too long. -WRITE_THROTTLE_TIMEOUT = 1236 - -# Collection type mismatch. +# collection type mismatch COLLECTION_TYPE_MISMATCH = 1237 -# Collection accessed but not yet loaded. +# collection not loaded COLLECTION_NOT_LOADED = 1238 -# Document revision corrupt or missing. +# illegal document revision DOCUMENT_REV_BAD = 1239 -# Read cannot be completed by storage engine. +# incomplete read INCOMPLETE_READ = 1240 +# not supported by old legacy data format +OLD_ROCKSDB_FORMAT = 1241 + +# an index with legacy sorted keys has been found +INDEX_HAS_LEGACY_SORTED_KEYS = 1242 + ################################### -# Checked ArangoDB Storage Errors # +# Checked ArangoDB storage errors # ################################### -# Datafile full. -DATAFILE_FULL = 1300 - -# Server database directory empty. +# server database directory is empty EMPTY_DATADIR = 1301 -# Operation needs to be retried. +# operation should be tried again TRY_AGAIN = 1302 -# Storage engine busy. 
+# engine is busy BUSY = 1303 -# Datafile merge in progress and the operation cannot be completed. +# merge in progress MERGE_IN_PROGRESS = 1304 -# Storage engine encountered an I/O error. +# storage engine I/O error IO_ERROR = 1305 ############################### -# ArangoDB Replication Errors # +# ArangoDB replication errors # ############################### -# Replication applier received no (or incomplete) response from master. +# no response REPLICATION_NO_RESPONSE = 1400 -# Replication applier received an invalid response from master. +# invalid response REPLICATION_INVALID_RESPONSE = 1401 -# Replication applier received a server error from master. -REPLICATION_MASTER_ERROR = 1402 +# leader error +REPLICATION_LEADER_ERROR = 1402 -# Replication applier tried to connect to master with incompatible version. -REPLICATION_MASTER_INCOMPATIBLE = 1403 +# leader incompatible +REPLICATION_LEADER_INCOMPATIBLE = 1403 -# Replication applier connected to a different master than before. -REPLICATION_MASTER_CHANGE = 1404 +# leader change +REPLICATION_LEADER_CHANGE = 1404 -# Replication applier was asked to connect to itself for replication. +# loop detected REPLICATION_LOOP = 1405 -# Unexpected marker found in replication log stream. +# unexpected marker REPLICATION_UNEXPECTED_MARKER = 1406 -# Found invalid replication applier state file. +# invalid applier state REPLICATION_INVALID_APPLIER_STATE = 1407 -# Found unexpected transaction ID. +# invalid transaction REPLICATION_UNEXPECTED_TRANSACTION = 1408 -# Synchronization of a shard takes longer than the configured timeout. +# shard synchronization attempt timeout exceeded REPLICATION_SHARD_SYNC_ATTEMPT_TIMEOUT_EXCEEDED = 1409 -# Invalid replication applier configuration. +# invalid replication applier configuration REPLICATION_INVALID_APPLIER_CONFIGURATION = 1410 -# Operation attempted while replication applier is running. 
+# cannot perform operation while applier is running REPLICATION_RUNNING = 1411 -# Replication applier stopped by user. +# replication stopped REPLICATION_APPLIER_STOPPED = 1412 -# Replication applier started without a known start tick value. +# no start tick REPLICATION_NO_START_TICK = 1413 -# Replication applier started without a known start tick value. +# start tick not present REPLICATION_START_TICK_NOT_PRESENT = 1414 -# Newborn follower submits a wrong checksum. +# wrong checksum REPLICATION_WRONG_CHECKSUM = 1416 -# Shard is not empty and follower tries a shortcut. +# shard not empty REPLICATION_SHARD_NONEMPTY = 1417 -# Specific replicated log is not found +# replicated log {} not found REPLICATION_REPLICATED_LOG_NOT_FOUND = 1418 -# Participant of a replicated log is ordered to do something only the leader can do. +# not the log leader REPLICATION_REPLICATED_LOG_NOT_THE_LEADER = 1419 -# Participant of a replicated log is ordered to do something only a follower can do. +# not a log follower REPLICATION_REPLICATED_LOG_NOT_A_FOLLOWER = 1420 -# Follower of a replicated log rejects an append-entries request. +# follower rejected append entries request REPLICATION_REPLICATED_LOG_APPEND_ENTRIES_REJECTED = 1421 -# Leader instance of a replicated log rejects a request because it just resigned. -# This can also happen if the term changes (due to a configuration change). +# a resigned leader instance rejected a request REPLICATION_REPLICATED_LOG_LEADER_RESIGNED = 1422 -# Follower instance of a replicated log rejects a request because it just resigned. -# This can also happen if the term changes (due to a configuration change). +# a resigned follower instance rejected a request REPLICATION_REPLICATED_LOG_FOLLOWER_RESIGNED = 1423 -# Participant instance of a replicated log is no longer available. +# the replicated log of the participant is gone REPLICATION_REPLICATED_LOG_PARTICIPANT_GONE = 1424 -# Participant tries to change its term but found an invalid new term. 
+# an invalid term was given REPLICATION_REPLICATED_LOG_INVALID_TERM = 1425 -# Participant is currently unconfigured. +# log participant unconfigured REPLICATION_REPLICATED_LOG_UNCONFIGURED = 1426 -# Specific replicated state was not found. +# replicated state {id:} of type {type:} not found REPLICATION_REPLICATED_STATE_NOT_FOUND = 1427 +# replicated state {id:} of type {type:} is unavailable +REPLICATION_REPLICATED_STATE_NOT_AVAILABLE = 1428 + +# not enough replicas for the configured write-concern are present +REPLICATION_WRITE_CONCERN_NOT_FULFILLED = 1429 + +# operation aborted because a previous operation failed +REPLICATION_REPLICATED_LOG_SUBSEQUENT_FAULT = 1430 + +# replicated state type {type:} is unavailable +REPLICATION_REPLICATED_STATE_IMPLEMENTATION_NOT_FOUND = 1431 + +# error in the replicated WAL subsystem +REPLICATION_REPLICATED_WAL_ERROR = 1432 + +# replicated WAL {file:} has an invalid or missing file header +REPLICATION_REPLICATED_WAL_INVALID_FILE = 1433 + +# replicated WAL {file:} is corrupt +REPLICATION_REPLICATED_WAL_CORRUPT = 1434 + ########################### -# ArangoDB Cluster Errors # +# ArangoDB cluster errors # ########################### -# Operation is sent to a non-following server. +# not a follower CLUSTER_NOT_FOLLOWER = 1446 -# Follower transaction already performed an intermediate commit and must be rolled back. +# follower transaction intermediate commit already performed CLUSTER_FOLLOWER_TRANSACTION_COMMIT_PERFORMED = 1447 -# Updating the plan on collection creation failed. +# creating collection failed due to precondition CLUSTER_CREATE_COLLECTION_PRECONDITION_FAILED = 1448 -# Raised on some occasions when one server gets a request from another. +# got a request from an unknown server CLUSTER_SERVER_UNKNOWN = 1449 -# Number of shards for a collection is higher than allowed. +# too many shards CLUSTER_TOO_MANY_SHARDS = 1450 -# Coordinator cannot create a collection as the collection ID already exists. 
-CLUSTER_COLLECTION_ID_EXISTS = 1453 - -# Coordinator cannot create an entry for a new collection in Plan hierarchy. +# could not create collection in plan CLUSTER_COULD_NOT_CREATE_COLLECTION_IN_PLAN = 1454 -# Coordinator sees DBServer issues when creating shards for a new collection. +# could not create collection CLUSTER_COULD_NOT_CREATE_COLLECTION = 1456 -# Coordinator runs into a timeout for some cluster wide operation. +# timeout in cluster operation CLUSTER_TIMEOUT = 1457 -# Coordinator cannot remove an entry for a collection in Plan hierarchy. +# could not remove collection from plan CLUSTER_COULD_NOT_REMOVE_COLLECTION_IN_PLAN = 1458 -# Coordinator cannot remove an entry for a collection in Current hierarchy. -CLUSTER_COULD_NOT_REMOVE_COLLECTION_IN_CURRENT = 1459 - -# Coordinator cannot create an entry for a new database in the Plan hierarchy. +# could not create database in plan CLUSTER_COULD_NOT_CREATE_DATABASE_IN_PLAN = 1460 -# Coordinator sees DBServer issues when creating databases for a new cluster. +# could not create database CLUSTER_COULD_NOT_CREATE_DATABASE = 1461 -# Coordinator cannot remove an entry for a database in the Plan hierarchy. +# could not remove database from plan CLUSTER_COULD_NOT_REMOVE_DATABASE_IN_PLAN = 1462 -# Coordinator cannot remove an entry for a database in the Current hierarchy. +# could not remove database from current CLUSTER_COULD_NOT_REMOVE_DATABASE_IN_CURRENT = 1463 -# Coordinator cannot determine the shard responsible for a given document. +# no responsible shard found CLUSTER_SHARD_GONE = 1464 -# Coordinator loses HTTP connection to a DBServer while transferring data. +# cluster internal HTTP connection broken CLUSTER_CONNECTION_LOST = 1465 -# "_key" attribute specified in sharded collection which uses not only "_key" -# as sharding attribute. +# must not specify _key for this collection CLUSTER_MUST_NOT_SPECIFY_KEY = 1466 -# Coordinator gets conflicting results from different shards. 
+# got contradicting answers from different shards CLUSTER_GOT_CONTRADICTING_ANSWERS = 1467 -# Coordinator tries to find out the shard responsible for a partial document. +# not all sharding attributes given CLUSTER_NOT_ALL_SHARDING_ATTRIBUTES_GIVEN = 1468 -# Not allowed to update the value of a shard attribute. +# must not change the value of a shard key attribute CLUSTER_MUST_NOT_CHANGE_SHARDING_ATTRIBUTES = 1469 -# Operation not supported in sharded collection. +# unsupported operation or parameter for clusters CLUSTER_UNSUPPORTED = 1470 -# Operation is coordinator-only. +# this operation is only valid on a coordinator in a cluster CLUSTER_ONLY_ON_COORDINATOR = 1471 -# Coordinator or DBServer cannot read the Plan. +# error reading Plan in agency CLUSTER_READING_PLAN_AGENCY = 1472 -# Coordinator cannot truncate all shards of a cluster collection. -CLUSTER_COULD_NOT_TRUNCATE_COLLECTION = 1473 - -# Internal communication of the cluster for AQL produces an error. +# error in cluster internal communication for AQL CLUSTER_AQL_COMMUNICATION = 1474 -# Operation is DBServer-only. +# this operation is only valid on a DBserver in a cluster CLUSTER_ONLY_ON_DBSERVER = 1477 -# Cannot reach a required DBServer. +# A cluster backend which was required for the operation could not be reached CLUSTER_BACKEND_UNAVAILABLE = 1478 -# Required collection out of sync during AQL execution. +# collection/view is out of sync CLUSTER_AQL_COLLECTION_OUT_OF_SYNC = 1481 -# Coordinator cannot create an entry for a new index in Plan hierarchy. +# could not create index in plan CLUSTER_COULD_NOT_CREATE_INDEX_IN_PLAN = 1482 -# Coordinator cannot remove an index from Plan hierarchy. +# could not drop index in plan CLUSTER_COULD_NOT_DROP_INDEX_IN_PLAN = 1483 -# One tries to create a collection with "shards_like" attribute which points -# to another collection that also has one. 
+# chain of distributeShardsLike references CLUSTER_CHAIN_OF_DISTRIBUTESHARDSLIKE = 1484 -# One tries to drop a collection to which another collection points with its -# "shard_like" attribute. +# must not drop collection while another has a distributeShardsLike attribute pointing to it CLUSTER_MUST_NOT_DROP_COLL_OTHER_DISTRIBUTESHARDSLIKE = 1485 -# One tries to create a collection which points to an unknown collection in its -# "shard_like" attribute. +# must not have a distributeShardsLike attribute pointing to an unknown collection CLUSTER_UNKNOWN_DISTRIBUTESHARDSLIKE = 1486 -# One tries to create a collection with a "replication_factor" greater than the -# available number of DBServers. +# the number of current DB-Servers is lower than the requested replicationFactor/writeConcern CLUSTER_INSUFFICIENT_DBSERVERS = 1487 -# Cannot drop follower. +# a follower could not be dropped in agency CLUSTER_COULD_NOT_DROP_FOLLOWER = 1488 -# Replication operation refused by a shard leader. +# a shard leader refuses to perform a replication operation CLUSTER_SHARD_LEADER_REFUSES_REPLICATION = 1489 -# Non-replication operation refused by a shard follower. +# a shard follower refuses to perform an operation CLUSTER_SHARD_FOLLOWER_REFUSES_OPERATION = 1490 -# Shard leader resigned in the meantime. +# a (former) shard leader refuses to perform an operation CLUSTER_SHARD_LEADER_RESIGNED = 1491 -# Agency operation failed after various retries. +# some agency operation failed CLUSTER_AGENCY_COMMUNICATION_FAILED = 1492 -# Servers currently competing for leadership. +# leadership challenge is ongoing CLUSTER_LEADERSHIP_CHALLENGE_ONGOING = 1495 -# Operation sent to a non-leading server. +# not a leader CLUSTER_NOT_LEADER = 1496 -# Coordinator cannot create an entry for a new view in Plan hierarchy. +# could not create view in plan CLUSTER_COULD_NOT_CREATE_VIEW_IN_PLAN = 1497 -# Coordinator tries to create a view and the ID already exists. 
+# view ID already exists CLUSTER_VIEW_ID_EXISTS = 1498 -# Coordinator cannot drop a collection entry in Plan hierarchy. +# could not drop collection in plan CLUSTER_COULD_NOT_DROP_COLLECTION = 1499 ######################### -# ArangoDB Query Errors # +# ArangoDB query errors # ######################### -# Running query killed by an explicit admin command. +# query killed QUERY_KILLED = 1500 -# Parsed query syntactically invalid. +# %s QUERY_PARSE = 1501 -# Empty query specified. +# query is empty QUERY_EMPTY = 1502 -# Runtime error caused by query. +# runtime error '%s' QUERY_SCRIPT = 1503 -# Number out of range. +# number out of range QUERY_NUMBER_OUT_OF_RANGE = 1504 -# Geo index coordinate invalid or out of range. +# invalid geo coordinate value QUERY_INVALID_GEO_VALUE = 1505 -# Invalid variable name. +# variable name '%s' has an invalid format QUERY_VARIABLE_NAME_INVALID = 1510 -# Variable redeclared in a query. +# variable '%s' is assigned multiple times QUERY_VARIABLE_REDECLARED = 1511 -# Variable name unknown or undefined. +# unknown variable '%s' QUERY_VARIABLE_NAME_UNKNOWN = 1512 -# Cannot acquire lock on collection. +# unable to read-lock collection %s QUERY_COLLECTION_LOCK_FAILED = 1521 -# Too many collections or shards in a query. +# too many collections/shards QUERY_TOO_MANY_COLLECTIONS = 1522 -# Document attribute redeclared. -QUERY_DOCUMENT_ATTRIBUTE_REDECLARED = 1530 +# too much nesting or too many objects +QUERY_TOO_MUCH_NESTING = 1524 -# Unknown attribute is used inside an OPTIONS clause. +# unknown/invalid OPTIONS attribute used QUERY_INVALID_OPTIONS_ATTRIBUTE = 1539 -# Undefined function called. +# usage of unknown function '%s()' QUERY_FUNCTION_NAME_UNKNOWN = 1540 -# Argument number mismatch. +# invalid number of arguments for function '%s()' QUERY_FUNCTION_ARGUMENT_NUMBER_MISMATCH = 1541 -# Argument type mismatch. +# invalid argument type in call to function '%s()' QUERY_FUNCTION_ARGUMENT_TYPE_MISMATCH = 1542 -# Invalid regex. 
+# invalid regex value QUERY_INVALID_REGEX = 1543 -# Invalid bind parameters. +# invalid structure of bind parameters QUERY_BIND_PARAMETERS_INVALID = 1550 -# Bind parameter missing. +# no value specified for declared bind parameter '%s' QUERY_BIND_PARAMETER_MISSING = 1551 -# Bind parameter undeclared. +# bind parameter '%s' was not declared in the query QUERY_BIND_PARAMETER_UNDECLARED = 1552 -# Invalid bind parameter value or type. +# bind parameter '%s' has an invalid value or type QUERY_BIND_PARAMETER_TYPE = 1553 -# Non-boolean value used in logical operation. -QUERY_INVALID_LOGICAL_VALUE = 1560 +# failed vector search +QUERY_VECTOR_SEARCH_NOT_APPLIED = 1554 -# Non-numeric value used in arithmetic operation. +# invalid arithmetic value QUERY_INVALID_ARITHMETIC_VALUE = 1561 -# Divide by zero. +# division by zero QUERY_DIVISION_BY_ZERO = 1562 -# Non-list operand used when expecting a list operand. +# array expected QUERY_ARRAY_EXPECTED = 1563 -# Collection is used as an operand in an AQL expression +# collection '%s' used as expression operand QUERY_COLLECTION_USED_IN_EXPRESSION = 1568 -# Function "FAIL()" called inside a query. +# FAIL(%s) called QUERY_FAIL_CALLED = 1569 -# Geo restriction specified but no suitable geo index found. +# no suitable geo index found for geo restriction on '%s' QUERY_GEO_INDEX_MISSING = 1570 -# Fulltext query performed on a collection without suitable fulltext index. +# no suitable fulltext index found for fulltext query on '%s' QUERY_FULLTEXT_INDEX_MISSING = 1571 -# Cannot convert value to a date. +# invalid date value QUERY_INVALID_DATE_VALUE = 1572 -# Query contains more than one data-modifying operation. +# multi-modify query QUERY_MULTI_MODIFY = 1573 -# Query contains an invalid aggregate expression. +# invalid aggregate expression QUERY_INVALID_AGGREGATE_EXPRESSION = 1574 -# Query contains options that cannot be resolved at query compile time. 
+# query options must be readable at query compile time QUERY_COMPILE_TIME_OPTIONS = 1575 -# Query contains an invalid options specification. -QUERY_EXCEPTION_OPTIONS = 1576 +# FILTER/PRUNE condition complexity is too high +QUERY_DNF_COMPLEXITY = 1576 -# Unusable index hint. +# could not use forced index hint QUERY_FORCED_INDEX_HINT_UNUSABLE = 1577 -# Dynamic function not allowed. +# disallowed dynamic call to '%s' QUERY_DISALLOWED_DYNAMIC_CALL = 1578 -# Collection data accessed after modification. +# access after data-modification by %s QUERY_ACCESS_AFTER_MODIFICATION = 1579 ############################ -# AQL User Function Errors # +# AQL user function errors # ############################ -# User function registered with invalid name. +# invalid user function name QUERY_FUNCTION_INVALID_NAME = 1580 -# User function registered with invalid code. +# invalid user function code QUERY_FUNCTION_INVALID_CODE = 1581 -# User function not found. +# user function '%s()' not found QUERY_FUNCTION_NOT_FOUND = 1582 -# Runtime exception raised by query function. +# user function runtime error: %s QUERY_FUNCTION_RUNTIME_ERROR = 1583 +# query is not eligible for plan caching +QUERY_NOT_ELIGIBLE_FOR_PLAN_CACHING = 1584 + ############################# -# AQL Query Registry Errors # +# AQL query registry errors # ############################# -# Query received an invalid JSON. +# bad execution plan JSON QUERY_BAD_JSON_PLAN = 1590 -# Query ID not found. +# query ID not found QUERY_NOT_FOUND = 1591 -# User provided expression does not evaluate to true. +# %s QUERY_USER_ASSERT = 1593 -# User provided expression does not evaluate to true. +# %s QUERY_USER_WARN = 1594 -# Window node is created after a data-modification operation. +# window operation after data-modification QUERY_WINDOW_AFTER_MODIFICATION = 1595 ########################## -# ArangoDB Cursor Errors # +# ArangoDB cursor errors # ########################## -# Cursor ID not found. 
+# cursor not found CURSOR_NOT_FOUND = 1600 -# Concurrent request still using the cursor. +# cursor is busy CURSOR_BUSY = 1601 -############################## -# ArangoDB Validation Errors # -############################## +##################################### +# ArangoDB schema validation errors # +##################################### -# Document does not pass schema validation. +# schema validation failed VALIDATION_FAILED = 1620 -# Schema description is invalid. +# invalid schema validation parameter VALIDATION_BAD_PARAMETER = 1621 ############################### -# ArangoDB Transaction Errors # +# ArangoDB transaction errors # ############################### -# Wrong usage of transactions. This is an internal error. +# internal transaction error TRANSACTION_INTERNAL = 1650 -# Nested transactions. +# nested transactions detected TRANSACTION_NESTED = 1651 -# Unregistered collection used in transaction. +# unregistered collection used in transaction TRANSACTION_UNREGISTERED_COLLECTION = 1652 -# Disallowed operation in transaction. +# disallowed operation inside transaction TRANSACTION_DISALLOWED_OPERATION = 1653 -# Transaction aborted. +# transaction aborted TRANSACTION_ABORTED = 1654 -# Transaction not found. +# transaction not found TRANSACTION_NOT_FOUND = 1655 ########################## -# User Management Errors # +# User management errors # ########################## -# Invalid username. +# invalid user name USER_INVALID_NAME = 1700 -# Username already exists. +# duplicate user USER_DUPLICATE = 1702 -# User not found. +# user not found USER_NOT_FOUND = 1703 -# User authenticated by an external server. +# user is external USER_EXTERNAL = 1705 ###################################### -# Service Management Errors (Legacy) # +# Service management errors (legacy) # ###################################### -# Cannot download service from central repository. 
+# service download failed SERVICE_DOWNLOAD_FAILED = 1752 -# Service upload from the client to the ArangoDB server failed. +# service upload failed SERVICE_UPLOAD_FAILED = 1753 ############### -# LDAP Errors # -############### - -# Cannot initialize an LDAP connection. -LDAP_CANNOT_INIT = 1800 - -# Cannot set an LDAP option. -LDAP_CANNOT_SET_OPTION = 1801 - -# Cannot bind to an LDAP server. -LDAP_CANNOT_BIND = 1802 - -# Cannot unbind from an LDAP server. -LDAP_CANNOT_UNBIND = 1803 - -# Cannot search the LDAP server. -LDAP_CANNOT_SEARCH = 1804 - -# Cannot start a TLS LDAP session. -LDAP_CANNOT_START_TLS = 1805 - -# LDAP did not find any objects with the specified search query. -LDAP_FOUND_NO_OBJECTS = 1806 - -# LDAP found zero or more than one user. -LDAP_NOT_ONE_USER_FOUND = 1807 - -# LDAP user not identified. -LDAP_USER_NOT_IDENTIFIED = 1808 - -# Cannot distinguish a valid mode for provided LDAP configuration. -LDAP_INVALID_MODE = 1820 - -############### -# Task Errors # +# Task errors # ############### -# Task created with an invalid ID. +# invalid task id TASK_INVALID_ID = 1850 -# Task created with a duplicate ID. +# duplicate task id TASK_DUPLICATE_ID = 1851 -# Task not found. +# task not found TASK_NOT_FOUND = 1852 ############################ -# Graph / Traversal Errors # +# Graph / traversal errors # ############################ -# Invalid name passed to the server. +# invalid graph GRAPH_INVALID_GRAPH = 1901 -# Invalid graph name passed to the server. -GRAPH_COULD_NOT_CREATE_GRAPH = 1902 - -# Invalid vertex ID passed to the server. -GRAPH_INVALID_VERTEX = 1903 - -# Vertex could not be created. -GRAPH_COULD_NOT_CREATE_VERTEX = 1904 - -# Vertex could not be changed. -GRAPH_COULD_NOT_CHANGE_VERTEX = 1905 - -# Invalid edge ID passed to the server. +# invalid edge GRAPH_INVALID_EDGE = 1906 -# Edge could not be created. -GRAPH_COULD_NOT_CREATE_EDGE = 1907 - -# Edge could not be changed. 
-GRAPH_COULD_NOT_CHANGE_EDGE = 1908 - -# Too many iterations in graph traversal. -GRAPH_TOO_MANY_ITERATIONS = 1909 - -# Invalid filter result returned in graph traversal. +# invalid filter result GRAPH_INVALID_FILTER_RESULT = 1910 -# Edge collection may only be used once in an edge definition. +# multi use of edge collection in edge def GRAPH_COLLECTION_MULTI_USE = 1920 -# Collection already used by another graph in a different edge definition. +# edge collection already used in edge def GRAPH_COLLECTION_USE_IN_MULTI_GRAPHS = 1921 -# Graph name missing. +# missing graph name GRAPH_CREATE_MISSING_NAME = 1922 -# Edge definition malformed (must be a list of dicts). +# malformed edge definition GRAPH_CREATE_MALFORMED_EDGE_DEFINITION = 1923 -# Graph not found. +# graph '%s' not found GRAPH_NOT_FOUND = 1924 -# Graph name already exists. +# graph already exists GRAPH_DUPLICATE = 1925 -# Vertex collection does not exist or is not part of the graph. +# vertex collection does not exist or is not part of the graph GRAPH_VERTEX_COL_DOES_NOT_EXIST = 1926 -# Collection not a vertex collection. +# collection not a vertex collection GRAPH_WRONG_COLLECTION_TYPE_VERTEX = 1927 -# Vertex collection not in orphan collections of the graph. +# collection is not in list of orphan collections GRAPH_NOT_IN_ORPHAN_COLLECTION = 1928 -# Collection already used in an edge definition of the graph. +# collection already used in edge def GRAPH_COLLECTION_USED_IN_EDGE_DEF = 1929 -# Edge collection not used in any edge definition of the graph. +# edge collection not used in graph GRAPH_EDGE_COLLECTION_NOT_USED = 1930 -# Collection "_graphs" does not exist. +# collection _graphs does not exist GRAPH_NO_GRAPH_COLLECTION = 1932 -# Invalid example array object string. -GRAPH_INVALID_EXAMPLE_ARRAY_OBJECT_STRING = 1933 - -# Invalid example type (must be a list or dict). -GRAPH_INVALID_EXAMPLE_ARRAY_OBJECT = 1934 - -# Invalid number of arguments. +# Invalid number of arguments. 
Expected: GRAPH_INVALID_NUMBER_OF_ARGUMENTS = 1935 # Invalid parameter type. GRAPH_INVALID_PARAMETER = 1936 -# Invalid ID. -GRAPH_INVALID_ID = 1937 - -# Collection already in orphans of the graph. +# collection used in orphans GRAPH_COLLECTION_USED_IN_ORPHANS = 1938 -# Edge collection does not exist or is not part of the graph. +# edge collection does not exist or is not part of the graph GRAPH_EDGE_COL_DOES_NOT_EXIST = 1939 -# Graph has no edge collections. +# empty graph GRAPH_EMPTY = 1940 -# Invalid data in "_graphs" collection. +# internal graph data corrupt GRAPH_INTERNAL_DATA_CORRUPT = 1941 -# Edge collection already defined. -GRAPH_INTERNAL_EDGE_COLLECTION_ALREADY_SET = 1942 +# must not drop collection while part of graph +GRAPH_MUST_NOT_DROP_COLLECTION = 1942 -# Orphan list argument malformed. Must be a list of strings. +# malformed orphan list GRAPH_CREATE_MALFORMED_ORPHAN_LIST = 1943 -# Collection used as a relation exists. +# edge definition collection is a document collection GRAPH_EDGE_DEFINITION_IS_DOCUMENT = 1944 -# The collection is used as the initial collection of this graph and is not allowed to -# be removed manually. +# initial collection is not allowed to be removed manually GRAPH_COLLECTION_IS_INITIAL = 1945 -# During the graph creation process no collection could be selected as the needed -# initial collection. Happens if a distributeShardsLike or replicationFactor mismatch -# was found. +# no valid initial collection found GRAPH_NO_INITIAL_COLLECTION = 1946 -# The _from or _to collection specified for the edge refers to a vertex collection which -# is not used in any edge definition of the graph. -GRAPH_REFERENCED_VERTEX_COLLECTION_NOT_USED = 1947 +# referenced vertex collection is not part of the graph +GRAPH_REFERENCED_VERTEX_COLLECTION_NOT_PART_OF_THE_GRAPH = 1947 -# Negative edge weight found during a weighted graph traversal or shortest path query. 
+# negative edge weight found GRAPH_NEGATIVE_EDGE_WEIGHT = 1948 +# the given collection is not part of the graph +GRAPH_COLLECTION_NOT_PART_OF_THE_GRAPH = 1949 + ################## -# Session Errors # +# Session errors # ################## -# Invalid/unknown session ID passed to the server. +# unknown session SESSION_UNKNOWN = 1950 -# Session expired. +# session expired SESSION_EXPIRED = 1951 ######################## -# Simple Client Errors # +# Simple Client errors # ######################## -# This error should not happen. +# unknown client error SIMPLE_CLIENT_UNKNOWN_ERROR = 2000 -# Client could not connect to server. +# could not connect to server SIMPLE_CLIENT_COULD_NOT_CONNECT = 2001 -# Client could not write data. +# could not write to server SIMPLE_CLIENT_COULD_NOT_WRITE = 2002 -# Client could not read data. +# could not read from server SIMPLE_CLIENT_COULD_NOT_READ = 2003 -# Will be raised if was erlaube?! +# was erlaube?! WAS_ERLAUBE = 2019 ####################### -# Communicator Errors # +# internal AQL errors # ####################### -# Communicator request aborted. -COMMUNICATOR_REQUEST_ABORTED = 2100 - -# Communicator disabled. -COMMUNICATOR_DISABLED = 2101 - -####################### -# Internal AQL errors # -####################### - -# Internal error during AQL execution. +# General internal AQL error INTERNAL_AQL = 2200 -# AQL block wrote in too few output registers. -WROTE_TOO_FEW_OUTPUT_REGISTERS = 2201 - -# AQL block wrote in too many output registers. -WROTE_TOO_MANY_OUTPUT_REGISTERS = 2202 - -# AQL block wrote in an output register twice. -WROTE_OUTPUT_REGISTER_TWICE = 2203 - -# AQL block wrote in a register that is not its output. -WROTE_IN_WRONG_REGISTER = 2204 - -# AQL block did not copy its input registers. -INPUT_REGISTERS_NOT_COPIED = 2205 - ########################## -# Foxx Management Errors # +# Foxx management errors # ########################## -# Service manifest file not a well-formed JSON. 
+# failed to parse manifest file MALFORMED_MANIFEST_FILE = 3000 -# Service manifest contains invalid values. +# manifest file is invalid INVALID_SERVICE_MANIFEST = 3001 -# Service folder or bundle does not exist on the server. +# service files missing SERVICE_FILES_MISSING = 3002 -# Local service bundle does not match the checksum in the database. +# service files outdated SERVICE_FILES_OUTDATED = 3003 -# Service options contain invalid values. +# service options are invalid INVALID_FOXX_OPTIONS = 3004 -# Service mountpath contains invalid characters. +# invalid mountpath INVALID_MOUNTPOINT = 3007 -# No service found at given mountpath. +# service not found SERVICE_NOT_FOUND = 3009 -# Service missing configuration or dependencies. +# service needs configuration SERVICE_NEEDS_CONFIGURATION = 3010 -# Service already exists at given mountpath. +# service already exists SERVICE_MOUNTPOINT_CONFLICT = 3011 -# Service directory does not contain a manifest file. +# missing manifest file SERVICE_MANIFEST_NOT_FOUND = 3012 -# Service options are not well-formed JSONs. +# failed to parse service options SERVICE_OPTIONS_MALFORMED = 3013 -# Source path does not match a file or directory. +# source path not found SERVICE_SOURCE_NOT_FOUND = 3014 -# Source path could not be resolved. +# error resolving source SERVICE_SOURCE_ERROR = 3015 -# Unknown service script. +# unknown script SERVICE_UNKNOWN_SCRIPT = 3016 -# API for managing Foxx services disabled. +# service api disabled SERVICE_API_DISABLED = 3099 ################################### -# JavaScript Module Loader Errors # +# JavaScript module loader errors # ################################### -# Cannot resolve module path. +# cannot locate module MODULE_NOT_FOUND = 3100 -# Module could not be parsed because of a syntax error. +# syntax error in module MODULE_SYNTAX_ERROR = 3101 -# Failed to invoke the module in its context. 
+# failed to invoke module MODULE_FAILURE = 3103 -##################### -# Enterprise Errors # -##################### +############################# +# Enterprise Edition errors # +############################# -# Requested collection needs to be smart. +# collection is not smart NO_SMART_COLLECTION = 4000 -# Given document does not have the smart graph attribute set. +# smart graph attribute not given NO_SMART_GRAPH_ATTRIBUTE = 4001 -# Smart collection cannot be dropped. +# cannot drop this smart collection CANNOT_DROP_SMART_COLLECTION = 4002 -# "_key" not prefixed with the value of the smart graph attribute. +# in smart vertex collections _key must be a string and prefixed with the value of the smart graph attribute KEY_MUST_BE_PREFIXED_WITH_SMART_GRAPH_ATTRIBUTE = 4003 -# Given smart graph attribute is illegal and cannot be used for sharding. +# attribute cannot be used as smart graph attribute ILLEGAL_SMART_GRAPH_ATTRIBUTE = 4004 -# Smart graph attribute of collection does not match the attribute of graph. +# smart graph attribute mismatch SMART_GRAPH_ATTRIBUTE_MISMATCH = 4005 -# Invalid smart join attribute declaration. +# invalid smart join attribute declaration INVALID_SMART_JOIN_ATTRIBUTE = 4006 -# Key must be prefixed with smart join attribute. +# shard key value must be prefixed with the value of the smart join attribute KEY_MUST_BE_PREFIXED_WITH_SMART_JOIN_ATTRIBUTE = 4007 -# Document lacks required smart join attribute. +# smart join attribute not given or invalid NO_SMART_JOIN_ATTRIBUTE = 4008 -# Cannot update the value of the smart join attribute. +# must not change the value of the smartJoinAttribute CLUSTER_MUST_NOT_CHANGE_SMART_JOIN_ATTRIBUTE = 4009 -# There was an attempt to create an edge between separated graph components. +# non disjoint edge found INVALID_DISJOINT_SMART_EDGE = 4010 -# Switching back and forth between Satellite and Smart in Disjoint SmartGraph is not -# supported within a single AQL statement. Split into multiple statements. 
+# Unsupported alternating Smart and Satellite in Disjoint SmartGraph. UNSUPPORTED_CHANGE_IN_SMART_TO_SATELLITE_DISJOINT_EDGE_DIRECTION = 4011 -######################### -# Cluster Repair Errors # -######################### - -# General error during cluster repairs. -CLUSTER_REPAIRS_FAILED = 5000 - -# Cluster repairs not healthy enough. -CLUSTER_REPAIRS_NOT_ENOUGH_HEALTHY = 5001 - -# Raised on various inconsistencies regarding the replication factor. -CLUSTER_REPAIRS_REPLICATION_FACTOR_VIOLATED = 5002 - -# Repaired collection has some shards without DBServers. -CLUSTER_REPAIRS_NO_DBSERVERS = 5003 - -# Shard in collection and its prototype in the corresponding "shard_like" -# collection have mismatching leaders. -CLUSTER_REPAIRS_MISMATCHING_LEADERS = 5004 - -# Shard in collection and its prototype in the corresponding "shard_like" -# collection don't have the same followers. -CLUSTER_REPAIRS_MISMATCHING_FOLLOWERS = 5005 - -# Repaired collection does not have "shard_like" as expected. -CLUSTER_REPAIRS_INCONSISTENT_ATTRIBUTES = 5006 - -# Collection and its "shard_like" prototype have unequal number of DBServers. -CLUSTER_REPAIRS_MISMATCHING_SHARDS = 5007 - -# Move shard job failed during cluster repairs. -CLUSTER_REPAIRS_JOB_FAILED = 5008 - -# Move shard job disappeared before finishing. -CLUSTER_REPAIRS_JOB_DISAPPEARED = 5009 - -# Agency transaction failed during either sending or executing it. -CLUSTER_REPAIRS_OPERATION_FAILED = 5010 - ################# -# Agency Errors # +# Agency errors # ################# -# Malformed gossip message. +# malformed gossip message AGENCY_MALFORMED_GOSSIP_MESSAGE = 20001 -# Malformed inquire request. +# malformed inquire request AGENCY_MALFORMED_INQUIRE_REQUEST = 20002 # Inform message must be an object. AGENCY_INFORM_MUST_BE_OBJECT = 20011 -# Inform message must contain a uint parameter 'term'. 
+# Inform message must contain uint parameter 'term' AGENCY_INFORM_MUST_CONTAIN_TERM = 20012 -# Inform message must contain a string parameter 'ID'. +# Inform message must contain string parameter 'id' AGENCY_INFORM_MUST_CONTAIN_ID = 20013 -# Inform message must contain an array 'active'. +# Inform message must contain array 'active' AGENCY_INFORM_MUST_CONTAIN_ACTIVE = 20014 -# Inform message must contain an object 'pool'. +# Inform message must contain object 'pool' AGENCY_INFORM_MUST_CONTAIN_POOL = 20015 -# Inform message must contain an object 'min ping'. +# Inform message must contain object 'min ping' AGENCY_INFORM_MUST_CONTAIN_MIN_PING = 20016 -# Inform message must contain an object 'max ping'. +# Inform message must contain object 'max ping' AGENCY_INFORM_MUST_CONTAIN_MAX_PING = 20017 -# Inform message must contain an object 'timeoutMult'. +# Inform message must contain object 'timeoutMult' AGENCY_INFORM_MUST_CONTAIN_TIMEOUT_MULT = 20018 -# Cannot rebuild readDB or the spearHead from replicated log. +# Cannot rebuild readDB and spearHead AGENCY_CANNOT_REBUILD_DBS = 20021 -# Malformed agency transaction. +# malformed agency transaction AGENCY_MALFORMED_TRANSACTION = 20030 ###################### -# Supervision Errors # +# Supervision errors # ###################### -# General supervision failure. +# general supervision failure SUPERVISION_GENERAL_FAILURE = 20501 #################### -# Scheduler Errors # +# Scheduler errors # #################### -# Queue is full. +# queue is full QUEUE_FULL = 21003 -# Request with a queue time requirement is set and cannot be fulfilled. +# queue time violated QUEUE_TIME_REQUIREMENT_VIOLATED = 21004 +# too many detached scheduler threads +TOO_MANY_DETACHED_THREADS = 21005 + ###################### -# Maintenance Errors # +# Maintenance errors # ###################### -# Maintenance action cannot be stopped once started. -ACTION_OPERATION_UNABORTABLE = 6002 - -# This maintenance action is still processing. 
+# maintenance action still processing ACTION_UNFINISHED = 6003 -# No such maintenance action exists. -NO_SUCH_ACTION = 6004 - ######################### -# Backup/Restore Errors # +# Backup/Restore errors # ######################### -# Failed to create hot backup set. +# internal hot backup error HOT_BACKUP_INTERNAL = 7001 -# Failed to restore to hot backup set. +# internal hot restore error HOT_RESTORE_INTERNAL = 7002 -# The hot backup set cannot be restored on non-matching cluster topology. +# backup does not match this topology BACKUP_TOPOLOGY = 7003 -# No space left on device. +# no space left on device NO_SPACE_LEFT_ON_DEVICE = 7004 -# Failed to upload hot backup set to remote target. +# failed to upload hot backup set to remote target FAILED_TO_UPLOAD_BACKUP = 7005 -# Failed to download hot backup set from remote source. +# failed to download hot backup set from remote source FAILED_TO_DOWNLOAD_BACKUP = 7006 -# Cannot find a hot backup set with this ID. +# no such hot backup set can be found NO_SUCH_HOT_BACKUP = 7007 -# Invalid remote repository configuration. +# remote hotback repository configuration error REMOTE_REPOSITORY_CONFIG_BAD = 7008 -# Some DB servers cannot be reached for transaction locks. +# some db servers cannot be reached for transaction locks LOCAL_LOCK_FAILED = 7009 -# Some DB servers cannot be reached for transaction locks. +# some db servers cannot be reached for transaction locks LOCAL_LOCK_RETRY = 7010 -# Conflict of multiple hot backup processes. +# hot backup conflict HOT_BACKUP_CONFLICT = 7011 -# One or more db servers could not be reached for hot backup inquiry. +# hot backup not all db servers reachable HOT_BACKUP_DBSERVERS_AWOL = 7012 -######################## -# Plan Analyzer Errors # -######################## - -# Plan could not be modified while creating or deleting Analyzers revision. 
-ERROR_CLUSTER_COULD_NOT_MODIFY_ANALYZERS_IN_PLAN = 7021 - -############## -# AIR Errors # -############## +######################### +# Plan Analyzers errors # +######################### -# During the execution of an AIR program an error occurred. -AIR_EXECUTION_ERROR = 8001 +# analyzers in plan could not be modified +CLUSTER_COULD_NOT_MODIFY_ANALYZERS_IN_PLAN = 7021 ############# # Licensing # ############# -# The license has expired or is invalid. +# license has expired or is invalid LICENSE_EXPIRED_OR_INVALID = 9001 -# Verification of license failed. +# license verification failed LICENSE_SIGNATURE_VERIFICATION = 9002 -# The ID of the license does not match the ID of this instance. +# non-matching license id LICENSE_NON_MATCHING_ID = 9003 -# The installed license does not cover this feature. +# feature is not enabled by the license LICENSE_FEATURE_NOT_ENABLED = 9004 -# The installed license does not cover a higher number of this resource. +# the resource is exhausted LICENSE_RESOURCE_EXHAUSTED = 9005 -# The license does not hold features of an ArangoDB license. +# invalid license LICENSE_INVALID = 9006 -# The license has one or more inferior features. +# conflicting license LICENSE_CONFLICT = 9007 -# Could not verify the license’s signature. 
+# failed to validate license signature LICENSE_VALIDATION_FAILED = 9008 diff --git a/arango/request.py b/arango/request.py index 5bba6242..41a0ac66 100644 --- a/arango/request.py +++ b/arango/request.py @@ -12,7 +12,7 @@ def normalize_headers( if driver_flags is not None: for flag in driver_flags: flags = flags + flag + ";" - driver_version = "8.1.4" + driver_version = "8.1.5" driver_header = "python-arango/" + driver_version + " (" + flags + ")" normalized_headers: Headers = { "charset": "utf-8", diff --git a/arango/utils.py b/arango/utils.py index 0d128db3..89a0eca5 100644 --- a/arango/utils.py +++ b/arango/utils.py @@ -9,7 +9,7 @@ import json import logging from contextlib import contextmanager -from typing import Any, Iterator, Sequence, Union +from typing import Any, Iterator, Optional, Sequence, Union from arango.exceptions import DocumentParseError, SortValidationError from arango.typings import Json, Jsons @@ -148,7 +148,7 @@ def validate_sort_parameters(sort: Sequence[Json]) -> bool: return True -def build_sort_expression(sort: Jsons | None) -> str: +def build_sort_expression(sort: Optional[Jsons]) -> str: """Build a sort condition for an AQL query. :param sort: Document sort parameters. 
diff --git a/docs/conf.py b/docs/conf.py index 5d380f14..361d1fbe 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -4,7 +4,7 @@ sys.path.insert(0, os.path.abspath("..")) project = "python-arango" -copyright = "2016-2024, Joohwan Oh" +copyright = "2016-2025, Joohwan Oh" author = "Joohwan Oh" extensions = [ "sphinx_rtd_theme", diff --git a/docs/index.rst b/docs/index.rst index 09f96f51..06955e0d 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -11,7 +11,7 @@ Requirements ============= - ArangoDB version 3.11+ -- Python version 3.8+ +- Python version 3.9+ Installation ============ diff --git a/pyproject.toml b/pyproject.toml index d3930e0a..8ed943be 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,7 +18,7 @@ keywords = ["arangodb", "python", "driver"] readme = "README.md" dynamic = ["version"] license = { file = "LICENSE" } -requires-python = ">=3.8" +requires-python = ">=3.9" classifiers = [ "Intended Audience :: Developers", @@ -26,7 +26,6 @@ classifiers = [ "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", diff --git a/setup.cfg b/setup.cfg index e600ca8d..846ab7fb 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,5 +1,5 @@ [flake8] max-line-length = 88 extend-ignore = E203, E741, W503 -exclude =.git .idea .*_cache dist htmlcov venv +exclude =.git .idea .*_cache dist htmlcov venv arango/errno.py per-file-ignores = __init__.py:F401 diff --git a/tests/test_aql.py b/tests/test_aql.py index 885d6a4e..6c1d3ea3 100644 --- a/tests/test_aql.py +++ b/tests/test_aql.py @@ -1,5 +1,7 @@ +import pytest from packaging import version +from arango.errno import FORBIDDEN from arango.exceptions import ( AQLCacheClearError, AQLCacheConfigureError, @@ -346,6 +348,86 @@ def test_aql_function_management(db, bad_db): assert db.aql.functions() == [] +def 
test_cache_results_management(db, bad_db, col, docs, cluster): + if cluster: + pytest.skip("Cluster mode does not support query result cache management") + + aql = db.aql + cache = aql.cache + + # Sanity check, just see if the response is OK. + _ = cache.properties() + with pytest.raises(AQLCachePropertiesError) as err: + _ = bad_db.aql.cache.properties() + assert err.value.error_code == FORBIDDEN + + # Turn on caching + result = cache.configure(mode="on") + assert result["mode"] == "on" + result = cache.properties() + assert result["mode"] == "on" + with pytest.raises(AQLCacheConfigureError) as err: + _ = bad_db.aql.cache.configure(mode="on") + assert err.value.error_code == FORBIDDEN + + # Run a simple query to use the cache + col.insert(docs[0]) + _ = aql.execute( + query="FOR doc IN @@collection RETURN doc", + bind_vars={"@collection": col.name}, + cache=True, + ) + + # Check the entries + entries = cache.entries() + assert isinstance(entries, list) + assert len(entries) > 0 + + with pytest.raises(AQLCacheEntriesError) as err: + _ = bad_db.aql.cache.entries() + assert err.value.error_code == FORBIDDEN + + # Clear the cache + cache.clear() + entries = cache.entries() + assert len(entries) == 0 + with pytest.raises(AQLCacheClearError) as err: + bad_db.aql.cache.clear() + assert err.value.error_code == FORBIDDEN + + +def test_cache_plan_management(db, bad_db, col, docs, db_version): + if db_version < version.parse("3.12.4"): + pytest.skip("Query plan cache is supported in ArangoDB 3.12.4+") + + aql = db.aql + cache = aql.cache + + # Run a simple query to use the cache + col.insert(docs[0]) + _ = aql.execute( + query="FOR doc IN @@collection RETURN doc", + bind_vars={"@collection": col.name}, + use_plan_cache=True, + ) + + # Check the entries + entries = cache.plan_entries() + assert isinstance(entries, list) + assert len(entries) > 0 + with pytest.raises(AQLCacheEntriesError) as err: + _ = bad_db.aql.cache.plan_entries() + assert err.value.error_code == FORBIDDEN 
+ + # Clear the cache + cache.clear_plan() + entries = cache.plan_entries() + assert len(entries) == 0 + with pytest.raises(AQLCacheClearError) as err: + bad_db.aql.cache.clear_plan() + assert err.value.error_code == FORBIDDEN + + def test_aql_cache_management(db, bad_db): # Test get AQL cache properties properties = db.aql.cache.properties() From adf29aeaf0459ba30661fb925701d6ab6defff48 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Mon, 24 Feb 2025 23:09:55 +0530 Subject: [PATCH 06/33] Updating python version badge (#363) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index ad5dee47..f76c608e 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ [![Last commit](https://img.shields.io/github/last-commit/arangodb/python-arango)](https://github.com/arangodb/python-arango/commits/main) [![PyPI version badge](https://img.shields.io/pypi/v/python-arango?color=3775A9&style=for-the-badge&logo=pypi&logoColor=FFD43B)](https://pypi.org/project/python-arango/) -[![Python versions badge](https://img.shields.io/badge/3.8%2B-3776AB?style=for-the-badge&logo=python&logoColor=FFD43B&label=Python)](https://pypi.org/project/python-arango/) +[![Python versions badge](https://img.shields.io/badge/3.9%2B-3776AB?style=for-the-badge&logo=python&logoColor=FFD43B&label=Python)](https://pypi.org/project/python-arango/) [![License](https://img.shields.io/github/license/arangodb/python-arango?color=9E2165&style=for-the-badge)](https://github.com/arangodb/python-arango/blob/main/LICENSE) [![Code style: black](https://img.shields.io/static/v1?style=for-the-badge&label=code%20style&message=black&color=black)](https://github.com/psf/black) From 33e09e11c166d26c6c92b64b13549fab532694e7 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Fri, 28 Feb 2025 13:08:38 +0530 Subject: [PATCH 07/33] Removing "cancelled" response (#364) * Removing "cancelled" response * Improved docstring --- arango/job.py | 6 +++--- docs/async.rst | 4 ++-- 2 
files changed, 5 insertions(+), 5 deletions(-) diff --git a/arango/job.py b/arango/job.py index d5065d04..85c96bc8 100644 --- a/arango/job.py +++ b/arango/job.py @@ -60,10 +60,10 @@ def status(self) -> str: fail. :return: Async job status. Possible values are "pending" (job is still - in queue), "done" (job finished or raised an error), or "cancelled" - (job was cancelled before completion). + in queue), "done" (job finished or raised an error). :rtype: str - :raise arango.exceptions.AsyncJobStatusError: If retrieval fails. + :raise arango.exceptions.AsyncJobStatusError: If retrieval fails or + job is not found. """ request = Request(method="get", endpoint=f"/_api/job/{self._id}") resp = self._conn.send_request(request) diff --git a/docs/async.rst b/docs/async.rst index 82690b29..5e480248 100644 --- a/docs/async.rst +++ b/docs/async.rst @@ -45,8 +45,8 @@ the results can be retrieved once available via :ref:`AsyncJob` objects. # Retrieve the status of each async job. for job in [job1, job2, job3, job4]: - # Job status can be "pending", "done" or "cancelled". - assert job.status() in {'pending', 'done', 'cancelled'} + # Job status can be "pending" or "done". + assert job.status() in {'pending', 'done'} # Let's wait until the jobs are finished. while job.status() != 'done': From 5724ae6311aeaf94eceb2085eaa71611987aba14 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Fri, 28 Feb 2025 13:10:09 +0530 Subject: [PATCH 08/33] Cleaning up unused 3.10 files. 
(#365) --- tests/static/cluster-3.10.conf | 13 ------------- tests/static/single-3.10.conf | 11 ----------- 2 files changed, 24 deletions(-) delete mode 100644 tests/static/cluster-3.10.conf delete mode 100644 tests/static/single-3.10.conf diff --git a/tests/static/cluster-3.10.conf b/tests/static/cluster-3.10.conf deleted file mode 100644 index d7732c90..00000000 --- a/tests/static/cluster-3.10.conf +++ /dev/null @@ -1,13 +0,0 @@ -[starter] -mode = cluster -local = true -address = 0.0.0.0 -port = 8528 - -[auth] -jwt-secret = /tests/static/keyfile - -[args] -all.database.password = passwd -all.log.api-enabled = true -all.javascript.allow-admin-execute = true diff --git a/tests/static/single-3.10.conf b/tests/static/single-3.10.conf deleted file mode 100644 index 09d1d9f3..00000000 --- a/tests/static/single-3.10.conf +++ /dev/null @@ -1,11 +0,0 @@ -[starter] -mode = single -address = 0.0.0.0 -port = 8528 - -[auth] -jwt-secret = /tests/static/keyfile - -[args] -all.database.password = passwd -all.javascript.allow-admin-execute = true From 04151f7d3f8ee92deb7939a57a5e522ff92e1232 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Fri, 28 Feb 2025 13:11:49 +0530 Subject: [PATCH 09/33] Updating certificates documentation (#366) --- arango/client.py | 10 ++++++---- docs/certificates.rst | 18 ++++++++++++++++-- 2 files changed, 22 insertions(+), 6 deletions(-) diff --git a/arango/client.py b/arango/client.py index 12f2bf11..b56755b0 100644 --- a/arango/client.py +++ b/arango/client.py @@ -79,10 +79,12 @@ class ArangoClient: :type deserializer: callable :param verify_override: Override TLS certificate verification. This will override the verify method of the underlying HTTP client. - None: Do not change the verification behavior of the underlying HTTP client. - True: Verify TLS certificate using the system CA certificates. - False: Do not verify TLS certificate. - str: Path to a custom CA bundle file or directory. 
+ + - `None`: Do not change the verification behavior of the + underlying HTTP client. + - `True`: Verify TLS certificate using the system CA certificates. + - `False`: Do not verify TLS certificate. + - `str`: Path to a custom CA bundle file or directory. :type verify_override: Union[bool, str, None] :param request_timeout: This is the default request timeout (in seconds) for http requests issued by the client if the parameter http_client is diff --git a/docs/certificates.rst b/docs/certificates.rst index 6440df20..e6ffedbc 100644 --- a/docs/certificates.rst +++ b/docs/certificates.rst @@ -17,8 +17,7 @@ your HTTP client as described in the :ref:`HTTPClients` section. The ``ArangoClient`` class provides an option to override the verification behavior, no matter what has been defined in the underlying HTTP session. -You can use this option to disable verification or provide a custom CA bundle without -defining a custom HTTP Client. +You can use this option to disable verification. .. code-block:: python @@ -34,3 +33,18 @@ application: import requests requests.packages.urllib3.disable_warnings() + +You can also provide a custom CA bundle without defining a custom HTTP Client: + +.. code-block:: python + + client = ArangoClient(hosts="https://localhost:8529", verify_override="path/to/certfile") + +If `verify_override` is set to a path to a directory, the directory must have been processed using the `c_rehash` utility +supplied with OpenSSL. For more information, see the `requests documentation `_. + +Setting `verify_override` to `True` will use the system's default CA bundle. + +.. 
code-block:: python + + client = ArangoClient(hosts="https://localhost:8529", verify_override=True) From 173f2b367d68b5f5c7edf793a09fe00334a8791f Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Fri, 28 Mar 2025 10:34:12 +0530 Subject: [PATCH 10/33] Collection Maintenance (#367) * Adding missing truncate parameters * Removing unused parameter * Minor adjustments * Removing note that is no longer relevant. * Update arango/collection.py Co-authored-by: Anthony Mahanna <43019056+aMahanna@users.noreply.github.com> * Update arango/collection.py Co-authored-by: Anthony Mahanna <43019056+aMahanna@users.noreply.github.com> --------- Co-authored-by: Anthony Mahanna <43019056+aMahanna@users.noreply.github.com> --- arango/collection.py | 85 +++++++++------------------------------- arango/utils.py | 8 ++-- tests/test_collection.py | 2 + 3 files changed, 24 insertions(+), 71 deletions(-) diff --git a/arango/collection.py b/arango/collection.py index e2dfcd2a..7f79fb3f 100644 --- a/arango/collection.py +++ b/arango/collection.py @@ -566,15 +566,31 @@ def response_handler(resp: Response) -> bool: return self._execute(request, response_handler) - def truncate(self) -> Result[bool]: + def truncate( + self, + sync: Optional[bool] = None, + compact: Optional[bool] = None, + ) -> Result[bool]: """Delete all documents in the collection. + :param sync: Block until deletion operation is synchronized to disk. + :type sync: bool | None + :param compact: Whether to compact the collection after truncation. + :type compact: bool | None :return: True if collection was truncated successfully. :rtype: bool :raise arango.exceptions.CollectionTruncateError: If operation fails. 
""" + params: Json = {} + if sync is not None: + params["waitForSync"] = sync + if compact is not None: + params["compact"] = compact + request = Request( - method="put", endpoint=f"/_api/collection/{self.name}/truncate" + method="put", + endpoint=f"/_api/collection/{self.name}/truncate", + params=params, ) def response_handler(resp: Response) -> bool: @@ -1747,14 +1763,6 @@ def insert_many( successfully (returns document metadata) and which were not (returns exception object). - .. note:: - - In edge/vertex collections, this method does NOT provide the - transactional guarantees and validations that single insert - operation does for graphs. If these properties are required, see - :func:`arango.database.StandardDatabase.begin_batch_execution` - for an alternative approach. - :param documents: List of new documents to insert. If they contain the "_key" or "_id" fields, the values are used as the keys of the new documents (auto-generated otherwise). Any "_rev" field is ignored. @@ -1876,14 +1884,6 @@ def update_many( (returns exception object). Alternatively, you can rely on setting **raise_on_document_error** to True (defaults to False). - .. note:: - - In edge/vertex collections, this method does NOT provide the - transactional guarantees and validations that single update - operation does for graphs. If these properties are required, see - :func:`arango.database.StandardDatabase.begin_batch_execution` - for an alternative approach. - :param documents: Partial or full documents with the updated values. They must contain the "_id" or "_key" fields. :type documents: [dict] @@ -1995,14 +1995,6 @@ def update_match( ) -> Result[int]: """Update matching documents. - .. note:: - - In edge/vertex collections, this method does NOT provide the - transactional guarantees and validations that single update - operation does for graphs. If these properties are required, see - :func:`arango.database.StandardDatabase.begin_batch_execution` - for an alternative approach. 
- :param filters: Document filters. :type filters: dict :param body: Full or partial document body with the updates. @@ -2085,14 +2077,6 @@ def replace_many( successfully (returns document metadata) and which were not (returns exception object). - .. note:: - - In edge/vertex collections, this method does NOT provide the - transactional guarantees and validations that single replace - operation does for graphs. If these properties are required, see - :func:`arango.database.StandardDatabase.begin_batch_execution` - for an alternative approach. - :param documents: New documents to replace the old ones with. They must contain the "_id" or "_key" fields. Edge documents must also have "_from" and "_to" fields. @@ -2187,14 +2171,6 @@ def replace_match( ) -> Result[int]: """Replace matching documents. - .. note:: - - In edge/vertex collections, this method does NOT provide the - transactional guarantees and validations that single replace - operation does for graphs. If these properties are required, see - :func:`arango.database.StandardDatabase.begin_batch_execution` - for an alternative approach. - :param filters: Document filters. :type filters: dict :param body: New document body. @@ -2263,14 +2239,6 @@ def delete_many( successfully (returns document metadata) and which were not (returns exception object). - .. note:: - - In edge/vertex collections, this method does NOT provide the - transactional guarantees and validations that single delete - operation does for graphs. If these properties are required, see - :func:`arango.database.StandardDatabase.begin_batch_execution` - for an alternative approach. - :param documents: Document IDs, keys or bodies. Document bodies must contain the "_id" or "_key" fields. :type documents: [str | dict] @@ -2354,14 +2322,6 @@ def delete_match( ) -> Result[int]: """Delete matching documents. - .. 
note:: - - In edge/vertex collections, this method does NOT provide the - transactional guarantees and validations that single delete - operation does for graphs. If these properties are required, see - :func:`arango.database.StandardDatabase.begin_batch_execution` - for an alternative approach. - :param filters: Document filters. :type filters: dict :param limit: Max number of documents to delete. If the limit is lower @@ -2428,14 +2388,6 @@ def import_bulk( This method is faster than :func:`arango.collection.Collection.insert_many` but does not return as many details. - .. note:: - - In edge/vertex collections, this method does NOT provide the - transactional guarantees and validations that single insert - operation does for graphs. If these properties are required, see - :func:`arango.database.StandardDatabase.begin_batch_execution` - for an alternative approach. - :param documents: List of new documents to insert. If they contain the "_key" or "_id" fields, the values are used as the keys of the new documents (auto-generated otherwise). Any "_rev" field is ignored. @@ -2757,7 +2709,6 @@ def update( "returnNew": return_new, "returnOld": return_old, "ignoreRevs": not check_rev, - "overwrite": not check_rev, "silent": silent, } if sync is not None: diff --git a/arango/utils.py b/arango/utils.py index 89a0eca5..822bc736 100644 --- a/arango/utils.py +++ b/arango/utils.py @@ -64,11 +64,11 @@ def get_doc_id(doc: Union[str, Json]) -> str: def is_none_or_int(obj: Any) -> bool: - """Check if obj is None or an integer. + """Check if obj is None or a positive integer. :param obj: Object to check. :type obj: Any - :return: True if object is None or an integer. + :return: True if object is None or a positive integer. 
:rtype: bool """ return obj is None or (isinstance(obj, int) and obj >= 0) @@ -128,11 +128,11 @@ def build_filter_conditions(filters: Json) -> str: return "FILTER " + " AND ".join(conditions) -def validate_sort_parameters(sort: Sequence[Json]) -> bool: +def validate_sort_parameters(sort: Jsons) -> bool: """Validate sort parameters for an AQL query. :param sort: Document sort parameters. - :type sort: Sequence[Json] + :type sort: Jsons :return: Validation success. :rtype: bool :raise arango.exceptions.SortValidationError: If sort parameters are invalid. diff --git a/tests/test_collection.py b/tests/test_collection.py index 7ab72800..c11a6541 100644 --- a/tests/test_collection.py +++ b/tests/test_collection.py @@ -136,6 +136,8 @@ def test_collection_misc_methods(col, bad_col, cluster): # Test truncate collection assert col.truncate() is True assert len(col) == 0 + assert col.truncate(sync=True, compact=False) is True + assert len(col) == 0 # Test truncate with bad collection with assert_raises(CollectionTruncateError) as err: From 6e5f8f12c66efe2db9068fc5323c0bc7fca32c6c Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sun, 30 Mar 2025 05:36:08 +0530 Subject: [PATCH 11/33] Mentioning async driver (#368) --- README.md | 2 ++ arango/request.py | 2 +- docs/index.rst | 3 +++ 3 files changed, 6 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index f76c608e..d4b995fd 100644 --- a/README.md +++ b/README.md @@ -19,6 +19,8 @@ Python driver for [ArangoDB](https://www.arangodb.com), a scalable multi-model database natively supporting documents, graphs and search. +If you're interested in using asyncio, please check [python-arango-async](https://github.com/arangodb/python-arango-async). 
+ ## Requirements - ArangoDB version 3.11+ diff --git a/arango/request.py b/arango/request.py index 41a0ac66..abb2b0db 100644 --- a/arango/request.py +++ b/arango/request.py @@ -12,7 +12,7 @@ def normalize_headers( if driver_flags is not None: for flag in driver_flags: flags = flags + flag + ";" - driver_version = "8.1.5" + driver_version = "8.1.7" driver_header = "python-arango/" + driver_version + " (" + flags + ")" normalized_headers: Headers = { "charset": "utf-8", diff --git a/docs/index.rst b/docs/index.rst index 06955e0d..4856e1b9 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -7,6 +7,8 @@ Python-Arango Welcome to the documentation for **python-arango**, a Python driver for ArangoDB_. +If you're interested in using asyncio, please check python-arango-async_ driver. + Requirements ============= @@ -96,3 +98,4 @@ Development specs .. _ArangoDB: https://www.arangodb.com +.. _python-arango-async: https://python-arango-async.readthedocs.io From c6e923b0fda3cf797b83197e2965fd385e311971 Mon Sep 17 00:00:00 2001 From: Anthony Mahanna <43019056+aMahanna@users.noreply.github.com> Date: Mon, 31 Mar 2025 10:01:30 -0400 Subject: [PATCH 12/33] new: `raise_on_document_error` for `insert_many` & `delete_many` (#369) --- arango/collection.py | 26 ++++++++++++++++++++++++-- tests/test_document.py | 12 ++++++++++++ 2 files changed, 36 insertions(+), 2 deletions(-) diff --git a/arango/collection.py b/arango/collection.py index 7f79fb3f..a996dc5c 100644 --- a/arango/collection.py +++ b/arango/collection.py @@ -1752,6 +1752,7 @@ def insert_many( merge: Optional[bool] = None, refill_index_caches: Optional[bool] = None, version_attribute: Optional[str] = None, + raise_on_document_error: bool = False, ) -> Result[Union[bool, List[Union[Json, ArangoServerError]]]]: """Insert multiple documents. @@ -1761,7 +1762,8 @@ def insert_many( returned as an object in the result list. 
It is up to you to inspect the list to determine which documents were inserted successfully (returns document metadata) and which were not - (returns exception object). + (returns exception object). Alternatively, you can rely on + setting **raise_on_document_error** to True (defaults to False). :param documents: List of new documents to insert. If they contain the "_key" or "_id" fields, the values are used as the keys of the new @@ -1801,6 +1803,11 @@ def insert_many( :param version_attribute: support for simple external versioning to document operations. :type version_attribute: str + :param raise_on_document_error: Whether to raise if a DocumentRevisionError + or a DocumentInsertError is encountered on an individual document, + as opposed to returning the error as an object in the result list. + Defaults to False. + :type raise_on_document_error: bool :return: List of document metadata (e.g. document keys, revisions) and any exception, or True if parameter **silent** was set to True. :rtype: [dict | ArangoServerError] | bool @@ -1853,7 +1860,12 @@ def response_handler( results.append(body) else: sub_resp = self._conn.prep_bulk_err_response(resp, body) - results.append(DocumentInsertError(sub_resp, request)) + error = DocumentInsertError(sub_resp, request) + + if raise_on_document_error: + raise error + + results.append(error) return results @@ -2228,6 +2240,7 @@ def delete_many( sync: Optional[bool] = None, silent: bool = False, refill_index_caches: Optional[bool] = None, + raise_on_document_error: bool = False, ) -> Result[Union[bool, List[Union[Json, ArangoServerError]]]]: """Delete multiple documents. @@ -2256,6 +2269,11 @@ def delete_many( index caches if document operations affect the edge index or cache-enabled persistent indexes. 
:type refill_index_caches: bool | None + :param raise_on_document_error: Whether to raise if a DocumentRevisionError + or a DocumentDeleteError is encountered on an individual document, + as opposed to returning the error as an object in the result list. + Defaults to False. + :type raise_on_document_error: bool :return: List of document metadata (e.g. document keys, revisions) and any exceptions, or True if parameter **silent** was set to True. :rtype: [dict | ArangoServerError] | bool @@ -2307,6 +2325,10 @@ def response_handler( error = DocumentRevisionError(sub_resp, request) else: error = DocumentDeleteError(sub_resp, request) + + if raise_on_document_error: + raise error + results.append(error) return results diff --git a/tests/test_document.py b/tests/test_document.py index 7cb0a435..0dbca038 100644 --- a/tests/test_document.py +++ b/tests/test_document.py @@ -239,6 +239,10 @@ def test_document_insert_many(col, bad_col, docs): assert isinstance(result["old"], dict) assert isinstance(result["_old_rev"], str) + # Test insert_many with raise_on_document_error set to True + with assert_raises(DocumentInsertError) as err: + col.insert_many(docs, raise_on_document_error=True) + # Test get with bad database with assert_raises(DocumentInsertError) as err: bad_col.insert_many(docs) @@ -1092,6 +1096,10 @@ def test_document_delete_many(col, bad_col, docs): assert "[HTTP 202][ERR 1200]" in error.message assert len(col) == 6 + # Test delete_many with raise_on_document_error set to True + with assert_raises(DocumentRevisionError) as err: + col.delete_many(docs, raise_on_document_error=True) + # Test delete_many (documents) with missing documents empty_collection(col) results = col.delete_many( @@ -1109,6 +1117,10 @@ def test_document_delete_many(col, bad_col, docs): assert "[HTTP 202][ERR 1202]" in error.message assert len(col) == 0 + # Test delete_many with raise_on_document_error set to True + with assert_raises(DocumentDeleteError) as err: + col.delete_many(docs, 
raise_on_document_error=True) + # Test delete_many with bad database with assert_raises(DocumentDeleteError) as err: bad_col.delete_many(docs) From 6aa9986d89be9bbe4a245e925c981c22fe273fcc Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Mon, 12 May 2025 20:12:38 +0000 Subject: [PATCH 13/33] Wait for sync upon graph creation (#370) * Wait for sync upon graph creation * Using graph properties instead of list search --- arango/database.py | 25 ++++++++++++++++++++----- tests/test_graph.py | 2 +- 2 files changed, 21 insertions(+), 6 deletions(-) diff --git a/arango/database.py b/arango/database.py index 17d7a124..8a145910 100644 --- a/arango/database.py +++ b/arango/database.py @@ -17,6 +17,7 @@ from arango.cluster import Cluster from arango.collection import StandardCollection from arango.connection import Connection +from arango.errno import HTTP_NOT_FOUND from arango.exceptions import ( AnalyzerCreateError, AnalyzerDeleteError, @@ -1644,12 +1645,14 @@ def has_graph(self, name: str) -> Result[bool]: :return: True if graph exists, False otherwise. :rtype: bool """ - request = Request(method="get", endpoint="/_api/gharial") + request = Request(method="get", endpoint=f"/_api/gharial/{name}") def response_handler(resp: Response) -> bool: - if not resp.is_success: - raise GraphListError(resp, request) - return any(name == graph["_key"] for graph in resp.body["graphs"]) + if resp.is_success: + return True + if resp.status_code == HTTP_NOT_FOUND: + return False + raise GraphListError(resp, request) return self._execute(request, response_handler) @@ -1699,6 +1702,7 @@ def create_graph( replication_factor: Optional[int] = None, write_concern: Optional[int] = None, satellite_collections: Optional[Sequence[str]] = None, + sync: Optional[bool] = None, ) -> Result[Graph]: """Create a new graph. @@ -1753,6 +1757,8 @@ def create_graph( element must be a string and a valid collection name. The collection type cannot be modified later. 
:type satellite_collections: [str] | None + :param sync: Wait until everything is synced to disk. + :type sync: bool | None :return: Graph API wrapper. :rtype: arango.graph.Graph :raise arango.exceptions.GraphCreateError: If create fails. @@ -1796,7 +1802,16 @@ def create_graph( if satellite_collections is not None: # pragma: no cover data["options"]["satellites"] = satellite_collections - request = Request(method="post", endpoint="/_api/gharial", data=data) + params: Params = {} + if sync is not None: + params["waitForSync"] = sync + + request = Request( + method="post", + endpoint="/_api/gharial", + data=data, + params=params, + ) def response_handler(resp: Response) -> Graph: if resp.is_success: diff --git a/tests/test_graph.py b/tests/test_graph.py index 4d2588cb..fe63455d 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -51,7 +51,7 @@ def test_graph_properties(graph, bad_graph, db): bad_graph.properties() new_graph_name = generate_graph_name() - new_graph = db.create_graph(new_graph_name) + new_graph = db.create_graph(new_graph_name, sync=True) properties = new_graph.properties() assert properties["id"] == f"_graphs/{new_graph_name}" assert properties["name"] == new_graph_name From 863d4cd7573a0a6406e3d1a6de969314f05a502b Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sun, 1 Jun 2025 10:49:39 +0000 Subject: [PATCH 14/33] Updated graphs link --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index d4b995fd..d90d8458 100644 --- a/README.md +++ b/README.md @@ -67,7 +67,7 @@ cursor = db.aql.execute("FOR doc IN students RETURN doc") student_names = [document["name"] for document in cursor] ``` -Another example with [graphs](https://www.arangodb.com/docs/stable/graphs.html): +Another example with [graphs](https://docs.arangodb.com/stable/graphs/): ```python from arango import ArangoClient From 3769989166098b05849223bdc3eb90d934cb12ba Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Wed, 4 Jun 
2025 06:14:06 +0300 Subject: [PATCH 15/33] API Updates (#372) * Updated API * Bumping up driver version * Updating docs --- arango/collection.py | 111 ++++++++------------------- arango/exceptions.py | 4 + arango/graph.py | 179 ++++++++++++++++++++++++------------------- arango/request.py | 2 +- docs/graph.rst | 3 +- tests/test_graph.py | 20 ++--- 6 files changed, 149 insertions(+), 170 deletions(-) diff --git a/arango/collection.py b/arango/collection.py index a996dc5c..a48bfd2a 100644 --- a/arango/collection.py +++ b/arango/collection.py @@ -3008,9 +3008,8 @@ def insert( self, vertex: Json, sync: Optional[bool] = None, - silent: bool = False, return_new: bool = False, - ) -> Result[Union[bool, Json]]: + ) -> Result[Json]: """Insert a new vertex document. :param vertex: New vertex document to insert. If it has "_key" or "_id" @@ -3019,20 +3018,16 @@ def insert( :type vertex: dict :param sync: Block until operation is synchronized to disk. :type sync: bool | None - :param silent: If set to True, no document metadata is returned. This - can be used to save resources. - :type silent: bool :param return_new: Include body of the new document in the returned metadata. Ignored if parameter **silent** is set to True. :type return_new: bool - :return: Document metadata (e.g. document key, revision), or True if - parameter **silent** was set to True. - :rtype: bool | dict + :return: Document metadata (e.g. document key, revision). + :rtype: dict :raise arango.exceptions.DocumentInsertError: If insert fails. 
""" vertex = self._ensure_key_from_id(vertex) - params: Params = {"silent": silent, "returnNew": return_new} + params: Params = {"returnNew": return_new} if sync is not None: params["waitForSync"] = sync @@ -3044,11 +3039,9 @@ def insert( write=self.name, ) - def response_handler(resp: Response) -> Union[bool, Json]: + def response_handler(resp: Response) -> Json: if not resp.is_success: raise DocumentInsertError(resp, request) - if silent: - return True return format_vertex(resp.body) return self._execute(request, response_handler) @@ -3059,10 +3052,9 @@ def update( check_rev: bool = True, keep_none: bool = True, sync: Optional[bool] = None, - silent: bool = False, return_old: bool = False, return_new: bool = False, - ) -> Result[Union[bool, Json]]: + ) -> Result[Json]: """Update a vertex document. :param vertex: Partial or full vertex document with updated values. It @@ -3076,18 +3068,14 @@ def update( :type keep_none: bool | None :param sync: Block until operation is synchronized to disk. :type sync: bool | None - :param silent: If set to True, no document metadata is returned. This - can be used to save resources. - :type silent: bool :param return_old: Include body of the old document in the returned metadata. Ignored if parameter **silent** is set to True. :type return_old: bool :param return_new: Include body of the new document in the returned metadata. Ignored if parameter **silent** is set to True. :type return_new: bool - :return: Document metadata (e.g. document key, revision) or True if - parameter **silent** was set to True. - :rtype: bool | dict + :return: Document metadata (e.g. document key, revision). + :rtype: dict :raise arango.exceptions.DocumentUpdateError: If update fails. :raise arango.exceptions.DocumentRevisionError: If revisions mismatch. 
""" @@ -3096,7 +3084,6 @@ def update( params: Params = { "keepNull": keep_none, "overwrite": not check_rev, - "silent": silent, "returnNew": return_new, "returnOld": return_old, } @@ -3112,13 +3099,11 @@ def update( write=self.name, ) - def response_handler(resp: Response) -> Union[bool, Json]: + def response_handler(resp: Response) -> Json: if resp.status_code == 412: # pragma: no cover raise DocumentRevisionError(resp, request) elif not resp.is_success: raise DocumentUpdateError(resp, request) - if silent is True: - return True return format_vertex(resp.body) return self._execute(request, response_handler) @@ -3128,10 +3113,9 @@ def replace( vertex: Json, check_rev: bool = True, sync: Optional[bool] = None, - silent: bool = False, return_old: bool = False, return_new: bool = False, - ) -> Result[Union[bool, Json]]: + ) -> Result[Json]: """Replace a vertex document. :param vertex: New vertex document to replace the old one with. It must @@ -3142,25 +3126,20 @@ def replace( :type check_rev: bool :param sync: Block until operation is synchronized to disk. :type sync: bool | None - :param silent: If set to True, no document metadata is returned. This - can be used to save resources. - :type silent: bool :param return_old: Include body of the old document in the returned metadata. Ignored if parameter **silent** is set to True. :type return_old: bool :param return_new: Include body of the new document in the returned metadata. Ignored if parameter **silent** is set to True. :type return_new: bool - :return: Document metadata (e.g. document key, revision) or True if - parameter **silent** was set to True. - :rtype: bool | dict + :return: Document metadata (e.g. document key, revision). + :rtype: dict :raise arango.exceptions.DocumentReplaceError: If replace fails. :raise arango.exceptions.DocumentRevisionError: If revisions mismatch. 
""" vertex_id, headers = self._prep_from_body(vertex, check_rev) params: Params = { - "silent": silent, "returnNew": return_new, "returnOld": return_old, } @@ -3176,13 +3155,11 @@ def replace( write=self.name, ) - def response_handler(resp: Response) -> Union[bool, Json]: + def response_handler(resp: Response) -> Json: if resp.status_code == 412: # pragma: no cover raise DocumentRevisionError(resp, request) elif not resp.is_success: raise DocumentReplaceError(resp, request) - if silent is True: - return True return format_vertex(resp.body) return self._execute(request, response_handler) @@ -3326,9 +3303,8 @@ def insert( self, edge: Json, sync: Optional[bool] = None, - silent: bool = False, return_new: bool = False, - ) -> Result[Union[bool, Json]]: + ) -> Result[Json]: """Insert a new edge document. :param edge: New edge document to insert. It must contain "_from" and @@ -3338,20 +3314,16 @@ def insert( :type edge: dict :param sync: Block until operation is synchronized to disk. :type sync: bool | None - :param silent: If set to True, no document metadata is returned. This - can be used to save resources. - :type silent: bool :param return_new: Include body of the new document in the returned metadata. Ignored if parameter **silent** is set to True. :type return_new: bool - :return: Document metadata (e.g. document key, revision) or True if - parameter **silent** was set to True. - :rtype: bool | dict + :return: Document metadata (e.g. document key, revision). + :rtype: dict :raise arango.exceptions.DocumentInsertError: If insert fails. 
""" edge = self._ensure_key_from_id(edge) - params: Params = {"silent": silent, "returnNew": return_new} + params: Params = {"returnNew": return_new} if sync is not None: params["waitForSync"] = sync @@ -3363,11 +3335,9 @@ def insert( write=self.name, ) - def response_handler(resp: Response) -> Union[bool, Json]: + def response_handler(resp: Response) -> Json: if not resp.is_success: raise DocumentInsertError(resp, request) - if silent: - return True return format_edge(resp.body) return self._execute(request, response_handler) @@ -3378,10 +3348,9 @@ def update( check_rev: bool = True, keep_none: bool = True, sync: Optional[bool] = None, - silent: bool = False, return_old: bool = False, return_new: bool = False, - ) -> Result[Union[bool, Json]]: + ) -> Result[Json]: """Update an edge document. :param edge: Partial or full edge document with updated values. It must @@ -3395,18 +3364,14 @@ def update( :type keep_none: bool | None :param sync: Block until operation is synchronized to disk. :type sync: bool | None - :param silent: If set to True, no document metadata is returned. This - can be used to save resources. - :type silent: bool :param return_old: Include body of the old document in the returned metadata. Ignored if parameter **silent** is set to True. :type return_old: bool :param return_new: Include body of the new document in the returned metadata. Ignored if parameter **silent** is set to True. :type return_new: bool - :return: Document metadata (e.g. document key, revision) or True if - parameter **silent** was set to True. - :rtype: bool | dict + :return: Document metadata (e.g. document key, revision). + :rtype: dict :raise arango.exceptions.DocumentUpdateError: If update fails. :raise arango.exceptions.DocumentRevisionError: If revisions mismatch. 
""" @@ -3415,7 +3380,6 @@ def update( params: Params = { "keepNull": keep_none, "overwrite": not check_rev, - "silent": silent, "returnNew": return_new, "returnOld": return_old, } @@ -3431,13 +3395,11 @@ def update( write=self.name, ) - def response_handler(resp: Response) -> Union[bool, Json]: + def response_handler(resp: Response) -> Json: if resp.status_code == 412: # pragma: no cover raise DocumentRevisionError(resp, request) if not resp.is_success: raise DocumentUpdateError(resp, request) - if silent is True: - return True return format_edge(resp.body) return self._execute(request, response_handler) @@ -3447,10 +3409,9 @@ def replace( edge: Json, check_rev: bool = True, sync: Optional[bool] = None, - silent: bool = False, return_old: bool = False, return_new: bool = False, - ) -> Result[Union[bool, Json]]: + ) -> Result[Json]: """Replace an edge document. :param edge: New edge document to replace the old one with. It must @@ -3462,25 +3423,20 @@ def replace( :type check_rev: bool :param sync: Block until operation is synchronized to disk. :type sync: bool | None - :param silent: If set to True, no document metadata is returned. This - can be used to save resources. - :type silent: bool :param return_old: Include body of the old document in the returned metadata. Ignored if parameter **silent** is set to True. :type return_old: bool :param return_new: Include body of the new document in the returned metadata. Ignored if parameter **silent** is set to True. :type return_new: bool - :return: Document metadata (e.g. document key, revision) or True if - parameter **silent** was set to True. - :rtype: bool | dict + :return: Document metadata (e.g. document key, revision). + :rtype: dict :raise arango.exceptions.DocumentReplaceError: If replace fails. :raise arango.exceptions.DocumentRevisionError: If revisions mismatch. 
""" edge_id, headers = self._prep_from_body(edge, check_rev) params: Params = { - "silent": silent, "returnNew": return_new, "returnOld": return_old, } @@ -3496,13 +3452,11 @@ def replace( write=self.name, ) - def response_handler(resp: Response) -> Union[bool, Json]: + def response_handler(resp: Response) -> Json: if resp.status_code == 412: # pragma: no cover raise DocumentRevisionError(resp, request) if not resp.is_success: raise DocumentReplaceError(resp, request) - if silent is True: - return True return format_edge(resp.body) return self._execute(request, response_handler) @@ -3575,9 +3529,8 @@ def link( to_vertex: Union[str, Json], data: Optional[Json] = None, sync: Optional[bool] = None, - silent: bool = False, return_new: bool = False, - ) -> Result[Union[bool, Json]]: + ) -> Result[Json]: """Insert a new edge document linking the given vertices. :param from_vertex: "From" vertex document ID or body with "_id" field. @@ -3590,21 +3543,17 @@ def link( :type data: dict | None :param sync: Block until operation is synchronized to disk. :type sync: bool | None - :param silent: If set to True, no document metadata is returned. This - can be used to save resources. - :type silent: bool :param return_new: Include body of the new document in the returned metadata. Ignored if parameter **silent** is set to True. :type return_new: bool - :return: Document metadata (e.g. document key, revision) or True if - parameter **silent** was set to True. - :rtype: bool | dict + :return: Document metadata (e.g. document key, revision). + :rtype: dict :raise arango.exceptions.DocumentInsertError: If insert fails. 
""" edge = {"_from": get_doc_id(from_vertex), "_to": get_doc_id(to_vertex)} if data is not None: edge.update(self._ensure_key_from_id(data)) - return self.insert(edge, sync=sync, silent=silent, return_new=return_new) + return self.insert(edge, sync=sync, return_new=return_new) def edges( self, diff --git a/arango/exceptions.py b/arango/exceptions.py index 29bcdc17..cdea90c5 100644 --- a/arango/exceptions.py +++ b/arango/exceptions.py @@ -543,6 +543,10 @@ class VertexCollectionDeleteError(ArangoServerError): """Failed to delete vertex collection.""" +class EdgeCollectionListError(ArangoServerError): + """Failed to retrieve edge collections.""" + + class EdgeDefinitionListError(ArangoServerError): """Failed to retrieve edge definitions.""" diff --git a/arango/graph.py b/arango/graph.py index 3279129f..589e9c44 100644 --- a/arango/graph.py +++ b/arango/graph.py @@ -7,6 +7,7 @@ from arango.collection import EdgeCollection, VertexCollection from arango.connection import Connection from arango.exceptions import ( + EdgeCollectionListError, EdgeDefinitionCreateError, EdgeDefinitionDeleteError, EdgeDefinitionListError, @@ -136,19 +137,28 @@ def vertex_collection(self, name: str) -> VertexCollection: """ return VertexCollection(self._conn, self._executor, self._name, name) - def create_vertex_collection(self, name: str) -> Result[VertexCollection]: + def create_vertex_collection( + self, + name: str, + options: Optional[Json] = None, + ) -> Result[VertexCollection]: """Create a vertex collection in the graph. :param name: Vertex collection name. :type name: str + :param options: Additional options for creating vertex collections. + :type options: dict | None :return: Vertex collection API wrapper. :rtype: arango.collection.VertexCollection :raise arango.exceptions.VertexCollectionCreateError: If create fails. 
""" + data: Json = {"collection": name} + if options is not None: + data["options"] = options request = Request( method="post", endpoint=f"/_api/gharial/{self._name}/vertex", - data={"collection": name}, + data=data, ) def response_handler(resp: Response) -> VertexCollection: @@ -259,6 +269,7 @@ def create_edge_definition( edge_collection: str, from_vertex_collections: Sequence[str], to_vertex_collections: Sequence[str], + options: Optional[Json] = None, ) -> Result[EdgeCollection]: """Create a new edge definition. @@ -279,18 +290,24 @@ def create_edge_definition( :type from_vertex_collections: [str] :param to_vertex_collections: Names of "to" vertex collections. :type to_vertex_collections: [str] + :param options: Additional options for creating edge definitions. + :type options: dict | None :return: Edge collection API wrapper. :rtype: arango.collection.EdgeCollection :raise arango.exceptions.EdgeDefinitionCreateError: If create fails. """ + data: Json = { + "collection": edge_collection, + "from": from_vertex_collections, + "to": to_vertex_collections, + } + if options is not None: + data["options"] = options + request = Request( method="post", endpoint=f"/_api/gharial/{self._name}/edge", - data={ - "collection": edge_collection, - "from": from_vertex_collections, - "to": to_vertex_collections, - }, + data=data, ) def response_handler(resp: Response) -> EdgeCollection: @@ -300,11 +317,32 @@ def response_handler(resp: Response) -> EdgeCollection: return self._execute(request, response_handler) + def edge_collections(self) -> Result[List[str]]: + """Get the names of all edge collections in the graph. + + :return: Edge collections in the graph. + :rtype: list + :raise: arango.exceptions.EdgeCollectionListError: If retrieval fails. 
+ """ + request = Request( + method="get", + endpoint=f"/_api/gharial/{self._name}/edge", + ) + + def response_handler(resp: Response) -> List[str]: + if not resp.is_success: + raise EdgeCollectionListError(resp, request) + return list(sorted(resp.body["collections"])) + + return self._execute(request, response_handler) + def replace_edge_definition( self, edge_collection: str, from_vertex_collections: Sequence[str], to_vertex_collections: Sequence[str], + sync: Optional[bool] = None, + purge: bool = False, ) -> Result[EdgeCollection]: """Replace an edge definition. @@ -314,18 +352,28 @@ def replace_edge_definition( :type from_vertex_collections: [str] :param to_vertex_collections: Names of "to" vertex collections. :type to_vertex_collections: [str] + :param sync: Block until operation is synchronized to disk. + :type sync: bool | None + :param purge: Drop the edge collection in addition to removing it from + the graph. The collection is only dropped if it is not used in other graphs. + :type purge: bool :return: Edge collection API wrapper. :rtype: arango.collection.EdgeCollection :raise arango.exceptions.EdgeDefinitionReplaceError: If replace fails. 
""" + data: Json = { + "collection": edge_collection, + "from": from_vertex_collections, + "to": to_vertex_collections, + "purge": purge, + } + if sync is not None: + data["waitForSync"] = sync + request = Request( method="put", endpoint=f"/_api/gharial/{self._name}/edge/{edge_collection}", - data={ - "collection": edge_collection, - "from": from_vertex_collections, - "to": to_vertex_collections, - }, + data=data, ) def response_handler(resp: Response) -> EdgeCollection: @@ -335,7 +383,12 @@ def response_handler(resp: Response) -> EdgeCollection: return self._execute(request, response_handler) - def delete_edge_definition(self, name: str, purge: bool = False) -> Result[bool]: + def delete_edge_definition( + self, + name: str, + purge: bool = False, + sync: Optional[bool] = None, + ) -> Result[bool]: """Delete an edge definition from the graph. :param name: Edge collection name. @@ -344,14 +397,20 @@ def delete_edge_definition(self, name: str, purge: bool = False) -> Result[bool] from the graph but the edge collection is also deleted completely from the database. :type purge: bool + :sync: Block until operation is synchronized to disk. + :type sync: bool | None :return: True if edge definition was deleted successfully. :rtype: bool :raise arango.exceptions.EdgeDefinitionDeleteError: If delete fails. """ + params: Json = {"dropCollections": purge} + if sync is not None: + params["waitForSync"] = sync + request = Request( method="delete", endpoint=f"/_api/gharial/{self._name}/edge/{name}", - params={"dropCollections": purge}, + params=params, ) def response_handler(resp: Response) -> bool: @@ -549,8 +608,7 @@ def insert_vertex( collection: str, vertex: Json, sync: Optional[bool] = None, - silent: bool = False, - ) -> Result[Union[bool, Json]]: + ) -> Result[Json]: """Insert a new vertex document. :param collection: Vertex collection name. @@ -561,15 +619,11 @@ def insert_vertex( :type vertex: dict :param sync: Block until operation is synchronized to disk. 
:type sync: bool | None - :param silent: If set to True, no document metadata is returned. This - can be used to save resources. - :type silent: bool - :return: Document metadata (e.g. document key, revision) or True if - parameter **silent** was set to True. - :rtype: bool | dict + :return: Document metadata (e.g. document key, revision). + :rtype: dict :raise arango.exceptions.DocumentInsertError: If insert fails. """ - return self.vertex_collection(collection).insert(vertex, sync, silent) + return self.vertex_collection(collection).insert(vertex, sync) def update_vertex( self, @@ -577,8 +631,7 @@ def update_vertex( check_rev: bool = True, keep_none: bool = True, sync: Optional[bool] = None, - silent: bool = False, - ) -> Result[Union[bool, Json]]: + ) -> Result[Json]: """Update a vertex document. :param vertex: Partial or full vertex document with updated values. It @@ -592,12 +645,8 @@ def update_vertex( :type keep_none: bool :param sync: Block until operation is synchronized to disk. :type sync: bool | None - :param silent: If set to True, no document metadata is returned. This - can be used to save resources. - :type silent: bool - :return: Document metadata (e.g. document key, revision) or True if - parameter **silent** was set to True. - :rtype: bool | dict + :return: Document metadata (e.g. document key, revision). + :rtype: dict :raise arango.exceptions.DocumentUpdateError: If update fails. :raise arango.exceptions.DocumentRevisionError: If revisions mismatch. """ @@ -606,7 +655,6 @@ def update_vertex( check_rev=check_rev, keep_none=keep_none, sync=sync, - silent=silent, ) def replace_vertex( @@ -614,8 +662,7 @@ def replace_vertex( vertex: Json, check_rev: bool = True, sync: Optional[bool] = None, - silent: bool = False, - ) -> Result[Union[bool, Json]]: + ) -> Result[Json]: """Replace a vertex document. :param vertex: New vertex document to replace the old one with. 
It must @@ -626,17 +673,15 @@ def replace_vertex( :type check_rev: bool :param sync: Block until operation is synchronized to disk. :type sync: bool | None - :param silent: If set to True, no document metadata is returned. This - can be used to save resources. - :type silent: bool - :return: Document metadata (e.g. document key, revision) or True if - parameter **silent** was set to True. - :rtype: bool | dict + :return: Document metadata (e.g. document key, revision). + :rtype: dict :raise arango.exceptions.DocumentReplaceError: If replace fails. :raise arango.exceptions.DocumentRevisionError: If revisions mismatch. """ return self._get_col_by_vertex(vertex).replace( - vertex=vertex, check_rev=check_rev, sync=sync, silent=silent + vertex=vertex, + check_rev=check_rev, + sync=sync, ) def delete_vertex( @@ -727,8 +772,7 @@ def insert_edge( collection: str, edge: Json, sync: Optional[bool] = None, - silent: bool = False, - ) -> Result[Union[bool, Json]]: + ) -> Result[Json]: """Insert a new edge document. :param collection: Edge collection name. @@ -740,15 +784,11 @@ def insert_edge( :type edge: dict :param sync: Block until operation is synchronized to disk. :type sync: bool | None - :param silent: If set to True, no document metadata is returned. This - can be used to save resources. - :type silent: bool - :return: Document metadata (e.g. document key, revision) or True if - parameter **silent** was set to True. - :rtype: bool | dict + :return: Document metadata (e.g. document key, revision). + :rtype: dict :raise arango.exceptions.DocumentInsertError: If insert fails. """ - return self.edge_collection(collection).insert(edge, sync, silent) + return self.edge_collection(collection).insert(edge, sync) def update_edge( self, @@ -756,8 +796,7 @@ def update_edge( check_rev: bool = True, keep_none: bool = True, sync: Optional[bool] = None, - silent: bool = False, - ) -> Result[Union[bool, Json]]: + ) -> Result[Json]: """Update an edge document. 
:param edge: Partial or full edge document with updated values. It must @@ -771,12 +810,8 @@ def update_edge( :type keep_none: bool | None :param sync: Block until operation is synchronized to disk. :type sync: bool | None - :param silent: If set to True, no document metadata is returned. This - can be used to save resources. - :type silent: bool - :return: Document metadata (e.g. document key, revision) or True if - parameter **silent** was set to True. - :rtype: bool | dict + :return: Document metadata (e.g. document key, revision). + :rtype: dict :raise arango.exceptions.DocumentUpdateError: If update fails. :raise arango.exceptions.DocumentRevisionError: If revisions mismatch. """ @@ -785,7 +820,6 @@ def update_edge( check_rev=check_rev, keep_none=keep_none, sync=sync, - silent=silent, ) def replace_edge( @@ -793,8 +827,7 @@ def replace_edge( edge: Json, check_rev: bool = True, sync: Optional[bool] = None, - silent: bool = False, - ) -> Result[Union[bool, Json]]: + ) -> Result[Json]: """Replace an edge document. :param edge: New edge document to replace the old one with. It must @@ -806,17 +839,15 @@ def replace_edge( :type check_rev: bool :param sync: Block until operation is synchronized to disk. :type sync: bool | None - :param silent: If set to True, no document metadata is returned. This - can be used to save resources. - :type silent: bool - :return: Document metadata (e.g. document key, revision) or True if - parameter **silent** was set to True. - :rtype: bool | dict + :return: Document metadata (e.g. document key, revision). + :rtype: dict :raise arango.exceptions.DocumentReplaceError: If replace fails. :raise arango.exceptions.DocumentRevisionError: If revisions mismatch. 
""" return self._get_col_by_edge(edge).replace( - edge=edge, check_rev=check_rev, sync=sync, silent=silent + edge=edge, + check_rev=check_rev, + sync=sync, ) def delete_edge( @@ -865,8 +896,7 @@ def link( to_vertex: Union[str, Json], data: Optional[Json] = None, sync: Optional[bool] = None, - silent: bool = False, - ) -> Result[Union[bool, Json]]: + ) -> Result[Json]: """Insert a new edge document linking the given vertices. :param collection: Edge collection name. @@ -881,12 +911,8 @@ def link( :type data: dict :param sync: Block until operation is synchronized to disk. :type sync: bool | None - :param silent: If set to True, no document metadata is returned. This - can be used to save resources. - :type silent: bool - :return: Document metadata (e.g. document key, revision) or True if - parameter **silent** was set to True. - :rtype: bool | dict + :return: Document metadata (e.g. document key, revision). + :rtype: dict :raise arango.exceptions.DocumentInsertError: If insert fails. """ return self.edge_collection(collection).link( @@ -894,7 +920,6 @@ def link( to_vertex=to_vertex, data=data, sync=sync, - silent=silent, ) def edges( diff --git a/arango/request.py b/arango/request.py index abb2b0db..11ff91c6 100644 --- a/arango/request.py +++ b/arango/request.py @@ -12,7 +12,7 @@ def normalize_headers( if driver_flags is not None: for flag in driver_flags: flags = flags + flag + ";" - driver_version = "8.1.7" + driver_version = "8.2.0" driver_header = "python-arango/" + driver_version + " (" + flags + ")" normalized_headers: Headers = { "charset": "utf-8", diff --git a/docs/graph.rst b/docs/graph.rst index 0b37154f..0645d5b6 100644 --- a/docs/graph.rst +++ b/docs/graph.rst @@ -83,6 +83,7 @@ Here is an example showing how edge definitions are managed: # Create an edge definition named "teach". This creates any missing # collections and returns an API wrapper for "teach" edge collection. + # At first, create a wrong teachers->teachers mapping intentionally. 
if not school.has_edge_definition('teach'): teach = school.create_edge_definition( edge_collection='teach', @@ -93,7 +94,7 @@ Here is an example showing how edge definitions are managed: # List edge definitions. school.edge_definitions() - # Replace the edge definition. + # Replace with the correct edge definition. school.replace_edge_definition( edge_collection='teach', from_vertex_collections=['teachers'], diff --git a/tests/test_graph.py b/tests/test_graph.py index fe63455d..fd3242fe 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -334,6 +334,7 @@ def test_create_graph_with_edge_definition(db): ) assert edge_definition in new_graph.edge_definitions() assert ovcol_name in new_graph.vertex_collections() + assert edge_definition["edge_collection"] in new_graph.edge_collections() def test_vertex_management(fvcol, bad_fvcol, fvdocs): @@ -394,7 +395,7 @@ def test_vertex_management(fvcol, bad_fvcol, fvdocs): key = vertex["_key"] # Test insert third valid vertex with silent set to True - assert fvcol.insert(vertex, silent=True) is True + assert isinstance(fvcol.insert(vertex), dict) assert len(fvcol) == 3 assert fvcol[key]["val"] == vertex["val"] @@ -444,7 +445,7 @@ def test_vertex_management(fvcol, bad_fvcol, fvdocs): # Test update vertex with silent set to True assert "bar" not in fvcol[vertex] - assert fvcol.update({"_key": key, "bar": 200}, silent=True) is True + assert isinstance(fvcol.update({"_key": key, "bar": 200}), dict) assert fvcol[vertex]["bar"] == 200 assert fvcol[vertex]["_rev"] != old_rev old_rev = fvcol[key]["_rev"] @@ -519,7 +520,7 @@ def test_vertex_management(fvcol, bad_fvcol, fvdocs): assert "vertex" in result # Test replace vertex with silent set to True - assert fvcol.replace({"_key": key, "bar": 200}, silent=True) is True + assert isinstance(fvcol.replace({"_key": key, "bar": 200}), dict) assert "foo" not in fvcol[key] assert "baz" not in fvcol[vertex] assert fvcol[vertex]["bar"] == 200 @@ -698,7 +699,7 @@ def 
test_edge_management(ecol, bad_ecol, edocs, fvcol, fvdocs, tvcol, tvdocs): key = edge["_key"] # Test insert second valid edge with silent set to True - assert ecol.insert(edge, sync=True, silent=True) is True + assert isinstance(ecol.insert(edge, sync=True), dict) assert edge in ecol and key in ecol assert len(ecol) == 2 assert ecol[key]["_from"] == edge["_from"] @@ -714,15 +715,14 @@ def test_edge_management(ecol, bad_ecol, edocs, fvcol, fvdocs, tvcol, tvdocs): # Test insert fourth valid edge using link method from_vertex = fvcol.get(fvdocs[2]) to_vertex = tvcol.get(tvdocs[0]) - assert ( + assert isinstance( ecol.link( from_vertex["_id"], to_vertex["_id"], {"_id": ecol.name + "/foo"}, sync=True, - silent=True, - ) - is True + ), + dict, ) assert "foo" in ecol assert len(ecol) == 4 @@ -816,7 +816,7 @@ def test_edge_management(ecol, bad_ecol, edocs, fvcol, fvdocs, tvcol, tvdocs): old_rev = result["_rev"] # Test update edge with silent option - assert ecol.update({"_key": key, "bar": 600}, silent=True) is True + assert isinstance(ecol.update({"_key": key, "bar": 600}), dict) assert ecol[key]["foo"] == 200 assert ecol[key]["bar"] == 600 assert ecol[key]["_rev"] != old_rev @@ -852,7 +852,7 @@ def test_edge_management(ecol, bad_ecol, edocs, fvcol, fvdocs, tvcol, tvdocs): # Test replace edge with silent set to True edge["bar"] = 200 - assert ecol.replace(edge, silent=True) is True + assert isinstance(ecol.replace(edge), dict) assert ecol[key]["foo"] == 100 assert ecol[key]["bar"] == 200 assert ecol[key]["_rev"] != old_rev From 7fc1037dcfbe4b65115f7ebd396f1f3dc8f6cfac Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Wed, 9 Jul 2025 04:46:46 +0300 Subject: [PATCH 16/33] Skip _db prefix on /_open/auth (#374) * Updating example version * No longer use _db/... 
prefix on /_open/auth * Fixing test dependant on version --- arango/connection.py | 20 ++++++++++++++++---- starter.sh | 2 +- tests/test_database.py | 11 ++++++++--- 3 files changed, 25 insertions(+), 8 deletions(-) diff --git a/arango/connection.py b/arango/connection.py index 8de2643a..9384aef1 100644 --- a/arango/connection.py +++ b/arango/connection.py @@ -125,7 +125,11 @@ def prep_response(self, resp: Response, deserialize: bool = True) -> Response: return resp def process_request( - self, host_index: int, request: Request, auth: Optional[Tuple[str, str]] = None + self, + host_index: int, + request: Request, + auth: Optional[Tuple[str, str]] = None, + skip_db_prefix: bool = False, ) -> Response: """Execute a request until a valid response has been returned. @@ -133,6 +137,10 @@ def process_request( :type host_index: int :param request: HTTP request. :type request: arango.request.Request + :param auth: HTTP basic authentication tuple (username, password). + :type auth: tuple[str, str] | None + :param skip_db_prefix: Skip the database prefix in the URL. + :type skip_db_prefix: bool :return: HTTP response. 
:rtype: arango.response.Response """ @@ -152,11 +160,16 @@ def process_request( request.headers["accept-encoding"] = self._response_compression while tries < self._host_resolver.max_tries: + if skip_db_prefix: + url = self._hosts[host_index] + request.endpoint + else: + url = self._url_prefixes[host_index] + request.endpoint + try: resp = self._http.send_request( session=self._sessions[host_index], method=request.method, - url=self._url_prefixes[host_index] + request.endpoint, + url=url, params=request.params, data=data, headers=request.headers, @@ -165,7 +178,6 @@ def process_request( return self.prep_response(resp, request.deserialize) except ConnectionError: - url = self._url_prefixes[host_index] + request.endpoint logging.debug(f"ConnectionError: {url}") if len(indexes_to_filter) == self._host_resolver.host_count - 1: @@ -425,7 +437,7 @@ def refresh_token(self) -> None: host_index = self._host_resolver.get_host_index() - resp = self.process_request(host_index, request) + resp = self.process_request(host_index, request, skip_db_prefix=True) if not resp.is_success: raise JWTAuthError(resp, request) diff --git a/starter.sh b/starter.sh index b4e39f24..a5126f54 100755 --- a/starter.sh +++ b/starter.sh @@ -6,7 +6,7 @@ # Usage: # ./starter.sh [single|cluster] [community|enterprise] [version] # Example: -# ./starter.sh cluster enterprise 3.12.1 +# ./starter.sh cluster enterprise 3.12.5 setup="${1:-single}" license="${2:-community}" diff --git a/tests/test_database.py b/tests/test_database.py index 0b1d9752..d6595a4d 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -439,18 +439,23 @@ def test_database_utf8(sys_db, special_db_names): assert sys_db.delete_database(name) -def test_license(sys_db, enterprise): +def test_license(sys_db, enterprise, db_version): license = sys_db.license() assert isinstance(license, dict) - if enterprise: - assert set(license.keys()) == { + if db_version >= version.parse("3.12.5"): + expected_keys = {"diskUsage", 
"upgrading"} + else: + expected_keys = { "upgrading", "features", "license", "version", "status", } + + if enterprise: + assert set(license.keys()) == expected_keys else: assert license == {"license": "none"} with pytest.raises(ServerLicenseSetError): From 3dfbe2584bfb5f5b512e87e83a7c19b6b0d4af58 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Wed, 9 Jul 2025 01:48:16 +0000 Subject: [PATCH 17/33] Bump driver version --- arango/request.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/arango/request.py b/arango/request.py index 11ff91c6..8735afe6 100644 --- a/arango/request.py +++ b/arango/request.py @@ -12,7 +12,7 @@ def normalize_headers( if driver_flags is not None: for flag in driver_flags: flags = flags + flag + ";" - driver_version = "8.2.0" + driver_version = "8.2.1" driver_header = "python-arango/" + driver_version + " (" + flags + ")" normalized_headers: Headers = { "charset": "utf-8", From 0366a1e2a1d22269db887a4ff4d55959a67a44a1 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Tue, 15 Jul 2025 04:17:13 +0300 Subject: [PATCH 18/33] Adding support for /key-generators (#375) --- arango/database.py | 18 ++++++++++++++++++ arango/exceptions.py | 4 ++++ tests/test_database.py | 7 +++++++ 3 files changed, 29 insertions(+) diff --git a/arango/database.py b/arango/database.py index 8a145910..20e771d2 100644 --- a/arango/database.py +++ b/arango/database.py @@ -27,6 +27,7 @@ AsyncJobListError, CollectionCreateError, CollectionDeleteError, + CollectionKeyGeneratorsError, CollectionListError, DatabaseCompactError, DatabaseCreateError, @@ -1623,6 +1624,23 @@ def response_handler(resp: Response) -> bool: return self._execute(request, response_handler) + def key_generators(self) -> Result[List[str]]: + """Returns the available key generators for collections. + + :return: List of available key generators. + :rtype: [str] + :raise arango.exceptions.CollectionKeyGeneratorsError: If retrieval fails. 
+ """ # noqa: E501 + request = Request(method="get", endpoint="/_api/key-generators") + + def response_handler(resp: Response) -> List[str]: + if not resp.is_success: + raise CollectionKeyGeneratorsError(resp, request) + result: List[str] = resp.body["keyGenerators"] + return result + + return self._execute(request, response_handler) + #################### # Graph Management # #################### diff --git a/arango/exceptions.py b/arango/exceptions.py index cdea90c5..789468ed 100644 --- a/arango/exceptions.py +++ b/arango/exceptions.py @@ -298,6 +298,10 @@ class CollectionTruncateError(ArangoServerError): """Failed to truncate collection.""" +class CollectionKeyGeneratorsError(ArangoServerError): + """Failed to retrieve key generators.""" + + class CollectionLoadError(ArangoServerError): """Failed to load collection.""" diff --git a/tests/test_database.py b/tests/test_database.py index d6595a4d..014f0235 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -13,6 +13,7 @@ USE_SYSTEM_DATABASE, ) from arango.exceptions import ( + CollectionKeyGeneratorsError, DatabaseCompactError, DatabaseCreateError, DatabaseDeleteError, @@ -348,6 +349,12 @@ def test_database_misc_methods(client, sys_db, db, bad_db, cluster, secret, db_v result = db_superuser.compact() assert result == {} + if db_version >= version.parse("3.12.0"): + key_generators = db.key_generators() + assert isinstance(key_generators, list) + with pytest.raises(CollectionKeyGeneratorsError): + bad_db.key_generators() + def test_database_management(db, sys_db, bad_db): # Test list databases From 08a2e54a69d7b0603bed7552fe0dc715d8632ef0 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Mon, 4 Aug 2025 13:15:43 +0800 Subject: [PATCH 19/33] Deprecate load/unload methods (#376) * Deprecating load method * Deprecating unload method * Minor comment correction --- arango/backup.py | 2 +- arango/collection.py | 16 ++++++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git 
a/arango/backup.py b/arango/backup.py index c06e4e15..d82e8e2c 100644 --- a/arango/backup.py +++ b/arango/backup.py @@ -33,7 +33,7 @@ def get(self, backup_id: Optional[str] = None) -> Result[Json]: :type backup_id: str :return: Backup details. :rtype: dict - :raise arango.exceptions.BackupGetError: If delete fails. + :raise arango.exceptions.BackupGetError: If the operation fails. """ request = Request( method="post", diff --git a/arango/collection.py b/arango/collection.py index a48bfd2a..2b13884a 100644 --- a/arango/collection.py +++ b/arango/collection.py @@ -537,10 +537,18 @@ def response_handler(resp: Response) -> Json: def load(self) -> Result[bool]: """Load the collection into memory. + .. note:: + The load function is deprecated from version 3.8.0 onwards and is a + no-op from version 3.9.0 onwards. It should no longer be used, as it + may be removed in a future version of ArangoDB. + :return: True if collection was loaded successfully. :rtype: bool :raise arango.exceptions.CollectionLoadError: If operation fails. """ + m = "The load function is deprecated from version 3.8.0 onwards and is a no-op from version 3.9.0 onwards." # noqa: E501 + warn(m, DeprecationWarning, stacklevel=2) + request = Request(method="put", endpoint=f"/_api/collection/{self.name}/load") def response_handler(resp: Response) -> bool: @@ -553,10 +561,18 @@ def response_handler(resp: Response) -> bool: def unload(self) -> Result[bool]: """Unload the collection from memory. + .. note:: + The unload function is deprecated from version 3.8.0 onwards and is a + no-op from version 3.9.0 onwards. It should no longer be used, as it + may be removed in a future version of ArangoDB. + :return: True if collection was unloaded successfully. :rtype: bool :raise arango.exceptions.CollectionUnloadError: If operation fails. """ + m = "The unload function is deprecated from version 3.8.0 onwards and is a no-op from version 3.9.0 onwards." 
# noqa: E501 + warn(m, DeprecationWarning, stacklevel=2) + request = Request(method="put", endpoint=f"/_api/collection/{self.name}/unload") def response_handler(resp: Response) -> bool: From 453f74b667c4d634196bed15cba2531957dfa7dd Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Mon, 18 Aug 2025 14:18:03 +0800 Subject: [PATCH 20/33] Getting the driver up-to-date (#377) * Adding missing parts * Bumping driver version --- arango/cluster.py | 2 +- arango/database.py | 23 ++++++++++++++++++++++- arango/exceptions.py | 4 ++++ arango/request.py | 2 +- docs/foxx.rst | 4 ++-- tests/test_database.py | 6 ++++++ 6 files changed, 36 insertions(+), 5 deletions(-) diff --git a/arango/cluster.py b/arango/cluster.py index ea13279d..78fd3ac9 100644 --- a/arango/cluster.py +++ b/arango/cluster.py @@ -261,7 +261,7 @@ def endpoints(self) -> Result[List[str]]: :return: List of endpoints. :rtype: [str] - :raise arango.exceptions.ServerEndpointsError: If retrieval fails. + :raise arango.exceptions.ClusterEndpointsError: If retrieval fails. """ request = Request(method="get", endpoint="/_api/cluster/endpoints") diff --git a/arango/database.py b/arango/database.py index 20e771d2..766161df 100644 --- a/arango/database.py +++ b/arango/database.py @@ -45,6 +45,7 @@ PermissionResetError, PermissionUpdateError, ServerAvailableOptionsGetError, + ServerCheckAvailabilityError, ServerCurrentOptionsGetError, ServerDetailsError, ServerEchoError, @@ -445,7 +446,7 @@ def set_license(self, license: str, force: bool = False) -> Result[Json]: :type force: bool :return: Server license. :rtype: dict - :raise arango.exceptions.ServerLicenseError: If retrieval fails. + :raise arango.exceptions.ServerLicenseSetError: If retrieval fails. """ request = Request( method="put", @@ -481,6 +482,25 @@ def response_handler(resp: Response) -> Json: return self._execute(request, response_handler) + def check_availability(self) -> Result[str]: + """Return ArangoDB server availability mode. 
+ + :return: Server availability mode ("readonly" or "default"). + :rtype: str + :raise arango.exceptions.ServerCheckAvailabilityError: If retrieval fails. + """ + request = Request( + method="get", + endpoint="/_admin/server/availability", + ) + + def response_handler(resp: Response) -> str: + if not resp.is_success: + raise ServerCheckAvailabilityError(resp, request) + return str(resp.body["mode"]) + + return self._execute(request, response_handler) + def compact( self, change_level: Optional[bool] = None, @@ -1069,6 +1089,7 @@ def metrics(self) -> Result[str]: :return: Server metrics in Prometheus format. :rtype: str + :raise arango.exceptions.ServerMetricsError: If operation fails. """ request = Request(method="get", endpoint="/_admin/metrics/v2") diff --git a/arango/exceptions.py b/arango/exceptions.py index 789468ed..891c813e 100644 --- a/arango/exceptions.py +++ b/arango/exceptions.py @@ -654,6 +654,10 @@ class ServerTimeError(ArangoServerError): """Failed to retrieve server system time.""" +class ServerCheckAvailabilityError(ArangoServerError): + """Failed to retrieve server availability mode.""" + + class ServerEchoError(ArangoServerError): """Failed to retrieve details on last request.""" diff --git a/arango/request.py b/arango/request.py index 8735afe6..4bb135a5 100644 --- a/arango/request.py +++ b/arango/request.py @@ -12,7 +12,7 @@ def normalize_headers( if driver_flags is not None: for flag in driver_flags: flags = flags + flag + ";" - driver_version = "8.2.1" + driver_version = "8.2.2" driver_header = "python-arango/" + driver_version + " (" + flags + ")" normalized_headers: Headers = { "charset": "utf-8", diff --git a/docs/foxx.rst b/docs/foxx.rst index 4f6ce35e..734a3168 100644 --- a/docs/foxx.rst +++ b/docs/foxx.rst @@ -83,9 +83,9 @@ information, refer to `ArangoDB manual`_. 
foxx.readme(service_mount) foxx.swagger(service_mount) foxx.download(service_mount) - foxx.commit(service_mount) + foxx.commit() foxx.scripts(service_mount) - foxx.run_script(service_mount, 'setup', []) + foxx.run_script(service_mount, 'setup', {}) foxx.run_tests(service_mount, reporter='xunit', output_format='xml') # Delete a service. diff --git a/tests/test_database.py b/tests/test_database.py index 014f0235..4e8a160e 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -20,6 +20,7 @@ DatabaseListError, DatabasePropertiesError, DatabaseSupportInfoError, + ServerCheckAvailabilityError, ServerDetailsError, ServerEchoError, ServerEngineError, @@ -355,6 +356,11 @@ def test_database_misc_methods(client, sys_db, db, bad_db, cluster, secret, db_v with pytest.raises(CollectionKeyGeneratorsError): bad_db.key_generators() + with pytest.raises(ServerCheckAvailabilityError): + bad_db.check_availability() + availability = db.check_availability() + assert isinstance(availability, str) + def test_database_management(db, sys_db, bad_db): # Test list databases From 8bd2d11755958ede2cfe14bfec253d4ce8f606b8 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sat, 1 Nov 2025 00:07:50 +0800 Subject: [PATCH 21/33] Updating test matrix (#379) --- .circleci/config.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 6c7e8ae6..fb535db4 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -11,10 +11,10 @@ workflows: name: Python (<< matrix.python_version >>) - ArangoDB (<< matrix.arangodb_license >>, << matrix.arangodb_version >> << matrix.arangodb_config >>) matrix: parameters: - python_version: ["3.9", "3.10", "3.11", "3.12"] + python_version: ["3.10", "3.11", "3.12"] arangodb_config: ["single", "cluster"] - arangodb_license: ["community", "enterprise"] - arangodb_version: ["3.11", "latest"] + arangodb_license: ["enterprise"] + arangodb_version: ["latest"] jobs: lint: From 
c2b88b04f2493c2f71f33b2d227cae8a6b52b2d5 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sun, 2 Nov 2025 21:04:02 +0800 Subject: [PATCH 22/33] New API methods (#378) * Adding aql-queries endpoint * Adding api-calls endpoint * bumped driver version * bumped driver version --------- Co-authored-by: Anthony Mahanna <43019056+aMahanna@users.noreply.github.com> --- arango/aql.py | 18 ++++++++++++++++++ arango/database.py | 21 +++++++++++++++++++++ arango/exceptions.py | 8 ++++++++ arango/request.py | 2 +- starter.sh | 4 ++-- tests/test_aql.py | 9 ++++++++- tests/test_database.py | 7 +++++++ 7 files changed, 65 insertions(+), 4 deletions(-) diff --git a/arango/aql.py b/arango/aql.py index 25786302..fdf92f84 100644 --- a/arango/aql.py +++ b/arango/aql.py @@ -17,6 +17,7 @@ AQLQueryClearError, AQLQueryExecuteError, AQLQueryExplainError, + AQLQueryHistoryError, AQLQueryKillError, AQLQueryListError, AQLQueryRulesGetError, @@ -627,6 +628,23 @@ def response_handler(resp: Response) -> Json: return self._execute(request, response_handler) + def history(self) -> Result[Json]: + """Return recently executed AQL queries (admin only). + + :return: AQL query history. + :rtype: dict + :raise arango.exceptions.AQLQueryHistoryError: If retrieval fails. + """ + request = Request(method="get", endpoint="/_admin/server/aql-queries") + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise AQLQueryHistoryError(resp, request) + res: Json = resp.body["result"] + return res + + return self._execute(request, response_handler) + def functions(self) -> Result[Jsons]: """List the AQL functions defined in the database. 
diff --git a/arango/database.py b/arango/database.py index 766161df..130b7b5a 100644 --- a/arango/database.py +++ b/arango/database.py @@ -44,6 +44,7 @@ PermissionListError, PermissionResetError, PermissionUpdateError, + ServerAPICallsError, ServerAvailableOptionsGetError, ServerCheckAvailabilityError, ServerCurrentOptionsGetError, @@ -463,6 +464,26 @@ def response_handler(resp: Response) -> Json: return self._execute(request, response_handler) + def api_calls(self) -> Result[Json]: + """Return recent API calls (admin only). + + :return: API calls history. + :rtype: dict + :raise arango.exceptions.ServerAPICallsError: If retrieval fails. + """ + request = Request( + method="get", + endpoint="/_admin/server/api-calls", + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise ServerAPICallsError(resp, request) + res: Json = resp.body["result"] + return res + + return self._execute(request, response_handler) + def status(self) -> Result[Json]: """Return ArangoDB server status. 
diff --git a/arango/exceptions.py b/arango/exceptions.py index 891c813e..7fc62983 100644 --- a/arango/exceptions.py +++ b/arango/exceptions.py @@ -121,6 +121,10 @@ class AQLQueryTrackingGetError(ArangoServerError): """Failed to retrieve AQL tracking properties.""" +class AQLQueryHistoryError(ArangoServerError): + """Failed to retrieve recent AQL queries.""" + + class AQLQueryTrackingSetError(ArangoServerError): """Failed to configure AQL tracking properties.""" @@ -638,6 +642,10 @@ class ServerDetailsError(ArangoServerError): """Failed to retrieve server details.""" +class ServerAPICallsError(ArangoServerError): + """Failed to retrieve recent API calls.""" + + class ServerLicenseGetError(ArangoServerError): """Failed to retrieve server license.""" diff --git a/arango/request.py b/arango/request.py index 4bb135a5..1c0e6ccb 100644 --- a/arango/request.py +++ b/arango/request.py @@ -12,7 +12,7 @@ def normalize_headers( if driver_flags is not None: for flag in driver_flags: flags = flags + flag + ";" - driver_version = "8.2.2" + driver_version = "8.2.3" driver_header = "python-arango/" + driver_version + " (" + flags + ")" normalized_headers: Headers = { "charset": "utf-8", diff --git a/starter.sh b/starter.sh index a5126f54..faf8561f 100755 --- a/starter.sh +++ b/starter.sh @@ -8,8 +8,8 @@ # Example: # ./starter.sh cluster enterprise 3.12.5 -setup="${1:-single}" -license="${2:-community}" +setup="${1:-cluster}" +license="${2:-enterprise}" version="${3:-latest}" extra_ports="" diff --git a/tests/test_aql.py b/tests/test_aql.py index 6c1d3ea3..f4074d63 100644 --- a/tests/test_aql.py +++ b/tests/test_aql.py @@ -13,6 +13,7 @@ AQLQueryClearError, AQLQueryExecuteError, AQLQueryExplainError, + AQLQueryHistoryError, AQLQueryKillError, AQLQueryListError, AQLQueryTrackingGetError, @@ -30,7 +31,7 @@ def test_aql_attributes(db, username): assert repr(db.aql.cache) == f"" -def test_aql_query_management(db_version, db, bad_db, col, docs): +def test_aql_query_management(db_version, 
db, sys_db, bad_db, col, docs): explain_fields = [ "estimatedNrItems", "estimatedCost", @@ -192,6 +193,12 @@ def test_aql_query_management(db_version, db, bad_db, col, docs): db.begin_async_execution().aql.execute("RETURN SLEEP(100)") db.begin_async_execution().aql.execute("RETURN SLEEP(50)") + # Test query history + with assert_raises(AQLQueryHistoryError): + bad_db.aql.history() + history = sys_db.aql.history() + assert isinstance(history, dict) + # Test list queries queries = db.aql.queries() for query in queries: diff --git a/tests/test_database.py b/tests/test_database.py index 4e8a160e..1006df3b 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -20,6 +20,7 @@ DatabaseListError, DatabasePropertiesError, DatabaseSupportInfoError, + ServerAPICallsError, ServerCheckAvailabilityError, ServerDetailsError, ServerEchoError, @@ -314,6 +315,12 @@ def test_database_misc_methods(client, sys_db, db, bad_db, cluster, secret, db_v with assert_raises(ServerLogLevelResetError): bad_db.reset_log_levels() + # Test api calls history + with assert_raises(ServerAPICallsError): + bad_db.api_calls() + history = sys_db.api_calls() + assert isinstance(history, dict) + # Test get storage engine engine = db.engine() assert engine["name"] in ["rocksdb"] From 805fd9feef682364d85718dae8d42ac216717638 Mon Sep 17 00:00:00 2001 From: Anthony Mahanna <43019056+aMahanna@users.noreply.github.com> Date: Sun, 14 Dec 2025 06:01:48 -0500 Subject: [PATCH 23/33] new: `params` property in index (#380) --- arango/formatter.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/arango/formatter.py b/arango/formatter.py index 1c63bf00..d90f5a87 100644 --- a/arango/formatter.py +++ b/arango/formatter.py @@ -116,6 +116,10 @@ def format_index(body: Json, formatter: bool = True) -> Json: if "optimizeTopK" in body: result["optimizeTopK"] = body["optimizeTopK"] + # Introduced via Vector Index in 3.12.6 + if "params" in body: + result["params"] = body["params"] + return verify_format(body, 
result) From 3852b962acbf8e94d794ddb712387f9fd47a5cf8 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Fri, 19 Dec 2025 00:40:09 +0800 Subject: [PATCH 24/33] Fixing tests (#381) --- arango/formatter.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/arango/formatter.py b/arango/formatter.py index d90f5a87..ebbb070c 100644 --- a/arango/formatter.py +++ b/arango/formatter.py @@ -903,6 +903,10 @@ def format_view_consolidation_policy(body: Json) -> Json: result["segments_bytes_floor"] = body["segmentsBytesFloor"] if "minScore" in body: result["min_score"] = body["minScore"] + if "maxSkewThreshold" in body: + result["max_skew_threshold"] = body["maxSkewThreshold"] + if "minDeletionRatio" in body: + result["min_deletion_ratio"] = body["minDeletionRatio"] return verify_format(body, result) From 7d773616434fa53b1087888268b703586b6deebd Mon Sep 17 00:00:00 2001 From: Anthony Mahanna Date: Thu, 18 Dec 2025 11:53:51 -0500 Subject: [PATCH 25/33] bump: `driver_version` --- arango/request.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/arango/request.py b/arango/request.py index 1c0e6ccb..5a213f68 100644 --- a/arango/request.py +++ b/arango/request.py @@ -12,7 +12,7 @@ def normalize_headers( if driver_flags is not None: for flag in driver_flags: flags = flags + flag + ";" - driver_version = "8.2.3" + driver_version = "8.2.4" driver_header = "python-arango/" + driver_version + " (" + flags + ")" normalized_headers: Headers = { "charset": "utf-8", From 0d8e6ca47c957b1313d3f268f85a70cf529d4dbf Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sun, 21 Dec 2025 00:32:07 +0800 Subject: [PATCH 26/33] Preparing tests to run against development build (#382) * Preparing tests to run against development build * Foxx tests * Foxx tests --- .circleci/config.yml | 76 ++++++++++------------- docs/contributing.rst | 4 +- pyproject.toml | 1 + tests/conftest.py | 110 ++++++++++++++++++++++++--------- tests/static/cluster-3.11.conf | 14 ----- tests/static/setup.sh | 
7 --- tests/static/single-3.11.conf | 12 ---- tests/test_analyzer.py | 4 +- tests/test_aql.py | 4 +- tests/test_backup.py | 6 +- tests/test_client.py | 46 +++++++------- tests/test_collection.py | 7 ++- tests/test_database.py | 6 +- tests/test_foxx.py | 43 ++++++++----- tests/test_index.py | 4 +- tests/test_task.py | 7 ++- tests/test_transaction.py | 5 +- tests/test_view.py | 10 +-- 18 files changed, 198 insertions(+), 168 deletions(-) delete mode 100644 tests/static/cluster-3.11.conf delete mode 100644 tests/static/setup.sh delete mode 100644 tests/static/single-3.11.conf diff --git a/.circleci/config.yml b/.circleci/config.yml index fb535db4..ccb9cf98 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,7 +1,14 @@ version: 2.1 -orbs: - codecov: codecov/codecov@3.3.0 +executors: + python-container: + docker: + - image: cimg/python:3.12 + resource_class: small + python-vm: + machine: + image: ubuntu-2404:current + resource_class: medium workflows: ci: @@ -14,98 +21,81 @@ workflows: python_version: ["3.10", "3.11", "3.12"] arangodb_config: ["single", "cluster"] arangodb_license: ["enterprise"] - arangodb_version: ["latest"] + arangodb_version: ["3.12"] jobs: lint: - docker: - - image: python:latest + executor: python-container + resource_class: small steps: - checkout - run: name: Install Dependencies command: pip install .[dev] - - run: - name: Run black - command: black --check --verbose --diff --color --config=pyproject.toml ./arango ./tests/ - + name: Run black + command: black --check --verbose --diff --color --config=pyproject.toml ./arango ./tests/ - run: name: Run flake8 command: flake8 ./arango ./tests - - run: name: Run isort command: isort --check ./arango ./tests - - run: name: Run mypy command: mypy ./arango - test: parameters: - python_version: - type: string - arangodb_config: - type: string - arangodb_license: - type: string - arangodb_version: - type: string - # TODO: Reconsider using a docker image instead of a machine - # i.e 
cimg/python:<< parameters.python_version >> - machine: - image: ubuntu-2204:current + python_version: + type: string + arangodb_config: + type: string + arangodb_license: + type: string + arangodb_version: + type: string + executor: python-vm steps: - checkout - - run: - name: Set Up ArangoDB + name: Setup ArangoDB command: | chmod +x starter.sh ./starter.sh << parameters.arangodb_config >> << parameters.arangodb_license >> << parameters.arangodb_version >> - - restore_cache: key: pip-and-local-cache - - # TODO: Revisit this bottleneck - run: name: Setup Python command: | pyenv --version pyenv install -f << parameters.python_version >> pyenv global << parameters.python_version >> - - run: - name: "Install Dependencies" + name: Install Dependencies command: pip install -e .[dev] - - run: docker ps -a - - run: docker logs arango - - run: - name: "Run pytest" + name: Run pytest command: | mkdir test-results + mkdir htmlcov args=("--junitxml=test-results/junit.xml" "--log-cli-level=DEBUG" "--host" "localhost" "--port=8529") if [ << parameters.arangodb_config >> = "cluster" ]; then args+=("--cluster" "--port=8539" "--port=8549") fi - if [ << parameters.arangodb_license >> = "enterprise" ]; then - args+=("--enterprise") + if [ << parameters.arangodb_license >> != "enterprise" ]; then + args+=("--skip" "enterprise") fi echo "Running pytest with args: ${args[@]}" - pytest --cov=arango --cov-report=xml --cov-report term-missing --color=yes --code-highlight=yes "${args[@]}" - + pytest --cov=arango --cov-report=html:htmlcov --color=yes --code-highlight=yes "${args[@]}" + - store_artifacts: + path: htmlcov + destination: coverage-report - store_artifacts: path: test-results - - store_test_results: path: test-results - - - codecov/upload: - file: coverage.xml diff --git a/docs/contributing.rst b/docs/contributing.rst index 2093f72f..09332329 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -59,7 +59,7 @@ To run the test suite (use your own host, port and root 
password): ~$ pip install pytest ~$ git clone https://github.com/arangodb/python-arango.git ~$ cd python-arango - ~$ py.test --complete --host=127.0.0.1 --port=8529 --passwd=passwd + ~$ pytest --cluster --host=127.0.0.1 --port=8529 --password=passwd To run the test suite with coverage report: @@ -68,7 +68,7 @@ To run the test suite with coverage report: ~$ pip install coverage pytest pytest-cov ~$ git clone https://github.com/arangodb/python-arango.git ~$ cd python-arango - ~$ py.test --complete --host=127.0.0.1 --port=8529 --passwd=passwd --cov=kq + ~$ pytest --cluster --host=127.0.0.1 --port=8529 --password=passwd --cov=kq As the test suite creates real databases and jobs, it should only be run in development environments. diff --git a/pyproject.toml b/pyproject.toml index 8ed943be..c7bf486d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,6 +57,7 @@ dev = [ "sphinx", "sphinx_rtd_theme", "types-requests", + "allure-pytest>=2.15", "types-setuptools", ] diff --git a/tests/conftest.py b/tests/conftest.py index ee5a0cd3..a6068056 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -39,7 +39,9 @@ class GlobalData: cluster: bool = None complete: bool = None replication: bool = None - enterprise: bool = None + skip: list[str] = None + foxx_path: str = None + backup_path: str = None secret: str = None root_password: str = None db_version: version = version.parse("0.0.0") @@ -49,24 +51,70 @@ class GlobalData: def pytest_addoption(parser): - parser.addoption("--host", action="store", default="127.0.0.1") - parser.addoption("--port", action="append", default=None) - parser.addoption("--passwd", action="store", default="passwd") - parser.addoption("--complete", action="store_true") - parser.addoption("--cluster", action="store_true") - parser.addoption("--replication", action="store_true") - parser.addoption("--enterprise", action="store_true") - parser.addoption("--secret", action="store", default="secret") + parser.addoption( + "--host", action="store", 
default="127.0.0.1", help="ArangoDB host address" + ) + parser.addoption( + "--port", action="append", default=None, help="ArangoDB coordinator ports" + ) + parser.addoption( + "--root", action="store", default="root", help="ArangoDB root user" + ) + parser.addoption( + "--password", action="store", default="passwd", help="ArangoDB password" + ) + parser.addoption( + "--secret", action="store", default="secret", help="ArangoDB JWT secret" + ) + parser.addoption( + "--cluster", action="store_true", help="Run tests in a cluster setup" + ) + parser.addoption( + "--complete", + action="store_true", + help="Run extra async and transaction tests (not supported)", + ) + parser.addoption("--replication", action="store_true", help="Run replication tests") + parser.addoption( + "--foxx-path", + action="store", + default="/tests/static/service.zip", + help="Foxx tests service path", + ) + parser.addoption( + "--backup-path", + action="store", + default="local://tmp", + help="Backup tests repository path", + ) + parser.addoption( + "--skip", + action="store", + nargs="*", + choices=[ + "backup", # backup tests + "batch", # batch API tests (deprecated) + "jwt-secret-keyfile", # server was not configured with a keyfile + "foxx", # foxx is not supported + "js-transactions", # javascript transactions are not supported + "task", # tasks API + "enterprise", # skip what used to be "enterprise-only" before 3.12 + ], + default=[], + help="Skip specific tests", + ) def pytest_configure(config): ports = config.getoption("port") if ports is None: ports = ["8529"] - hosts = [f"http://{config.getoption('host')}:{p}" for p in ports] + hosts = [f"http://{config.getoption('host')}:{p}" for p in ports] # noqa: E231 url = hosts[0] secret = config.getoption("secret") cluster = config.getoption("cluster") + root_password = config.getoption("password") + root_user = config.getoption("root") host_resolver = "fallback" http_client = DefaultHTTPClient(request_timeout=120) @@ -76,8 +124,8 @@ def 
pytest_configure(config): ) sys_db = client.db( name="_system", - username="root", - password=config.getoption("passwd"), + username=root_user, + password=root_password, superuser_token=generate_jwt(secret), verify=True, ) @@ -148,9 +196,11 @@ def pytest_configure(config): global_data.cluster = cluster global_data.complete = config.getoption("complete") global_data.replication = config.getoption("replication") - global_data.enterprise = config.getoption("enterprise") global_data.secret = secret - global_data.root_password = config.getoption("passwd") + global_data.root_password = root_password + global_data.skip = config.getoption("skip") + global_data.backup_path = config.getoption("backup_path") + global_data.foxx_path = config.getoption("foxx_path") # noinspection PyShadowingNames @@ -186,7 +236,7 @@ def pytest_unconfigure(*_): # pragma: no cover sys_db.delete_collection(col_name, ignore_missing=True) # # Remove all backups. - if global_data.enterprise: + if "backup" not in global_data.skip and "enterprise" not in global_data.skip: for backup_id in sys_db.backup.get()["list"].keys(): sys_db.backup.delete(backup_id) @@ -223,16 +273,6 @@ def pytest_generate_tests(metafunc): bad_async_db._executor = TestAsyncApiExecutor(bad_conn) bad_dbs.append(bad_async_db) - # Skip test batch databases, as they are deprecated. 
- """ - tst_batch_db = StandardDatabase(tst_conn) - tst_batch_db._executor = TestBatchExecutor(tst_conn) - tst_dbs.append(tst_batch_db) - bad_batch_bdb = StandardDatabase(bad_conn) - bad_batch_bdb._executor = TestBatchExecutor(bad_conn) - bad_dbs.append(bad_batch_bdb) - """ - if "db" in metafunc.fixturenames and "bad_db" in metafunc.fixturenames: metafunc.parametrize("db,bad_db", zip(tst_dbs, bad_dbs)) @@ -431,11 +471,21 @@ def replication(): return global_data.replication -@pytest.fixture(autouse=False) -def enterprise(): - return global_data.enterprise - - @pytest.fixture(autouse=False) def secret(): return global_data.secret + + +@pytest.fixture +def backup_path(): + return global_data.backup_path + + +@pytest.fixture +def foxx_path(): + return global_data.foxx_path + + +@pytest.fixture +def skip_tests(): + return global_data.skip diff --git a/tests/static/cluster-3.11.conf b/tests/static/cluster-3.11.conf deleted file mode 100644 index 86f78556..00000000 --- a/tests/static/cluster-3.11.conf +++ /dev/null @@ -1,14 +0,0 @@ -[starter] -mode = cluster -local = true -address = 0.0.0.0 -port = 8528 - -[auth] -jwt-secret = /tests/static/keyfile - -[args] -all.database.password = passwd -all.database.extended-names = true -all.log.api-enabled = true -all.javascript.allow-admin-execute = true diff --git a/tests/static/setup.sh b/tests/static/setup.sh deleted file mode 100644 index 0d2189ba..00000000 --- a/tests/static/setup.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/sh - -mkdir -p /tests/static -wget -O /tests/static/service.zip "http://localhost:8000/$PROJECT/tests/static/service.zip" -wget -O /tests/static/keyfile "http://localhost:8000/$PROJECT/tests/static/keyfile" -wget -O /tests/static/arangodb.conf "http://localhost:8000/$PROJECT/tests/static/$ARANGODB_CONF" -arangodb --configuration=/tests/static/arangodb.conf diff --git a/tests/static/single-3.11.conf b/tests/static/single-3.11.conf deleted file mode 100644 index df45cb76..00000000 --- 
a/tests/static/single-3.11.conf +++ /dev/null @@ -1,12 +0,0 @@ -[starter] -mode = single -address = 0.0.0.0 -port = 8528 - -[auth] -jwt-secret = /tests/static/keyfile - -[args] -all.database.password = passwd -all.database.extended-names = true -all.javascript.allow-admin-execute = true diff --git a/tests/test_analyzer.py b/tests/test_analyzer.py index a7573bd6..6e6744e7 100644 --- a/tests/test_analyzer.py +++ b/tests/test_analyzer.py @@ -9,7 +9,7 @@ from tests.helpers import assert_raises, generate_analyzer_name -def test_analyzer_management(db, bad_db, cluster, enterprise, db_version): +def test_analyzer_management(db, bad_db, skip_tests, db_version): analyzer_name = generate_analyzer_name() full_analyzer_name = db.name + "::" + analyzer_name bad_analyzer_name = generate_analyzer_name() @@ -70,7 +70,7 @@ def test_analyzer_management(db, bad_db, cluster, enterprise, db_version): assert db.delete_analyzer(analyzer_name, ignore_missing=True) is False # Test create geo_s2 analyzer (EE only) - if enterprise: + if "enterprise" not in skip_tests: analyzer_name = generate_analyzer_name() result = db.create_analyzer(analyzer_name, "geo_s2", {}) assert result["type"] == "geo_s2" diff --git a/tests/test_aql.py b/tests/test_aql.py index f4074d63..2672887c 100644 --- a/tests/test_aql.py +++ b/tests/test_aql.py @@ -254,8 +254,8 @@ def test_aql_query_management(db_version, db, sys_db, bad_db, col, docs): assert err.value.error_code in {11, 1228} -def test_aql_query_force_one_shard_attribute_value(db, db_version, enterprise, cluster): - if not enterprise or not cluster: +def test_aql_query_force_one_shard_attribute_value(db, skip_tests, cluster): + if "enterprise" in skip_tests or not cluster: return name = generate_col_name() diff --git a/tests/test_backup.py b/tests/test_backup.py index e030c5e6..00470758 100644 --- a/tests/test_backup.py +++ b/tests/test_backup.py @@ -12,9 +12,11 @@ from tests.helpers import assert_raises -def test_backup_management(sys_db, bad_db, 
enterprise, cluster): - if not enterprise: +def test_backup_management(sys_db, bad_db, cluster, skip_tests): + if "enterprise" in skip_tests: pytest.skip("Only for ArangoDB enterprise edition") + if "backup" in skip_tests: + pytest.skip("Skipping backup tests") # Test create backup "foo". result = sys_db.backup.create( diff --git a/tests/test_client.py b/tests/test_client.py index a196a8fd..4da61e47 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -19,21 +19,21 @@ ) -def test_client_attributes(): +def test_client_attributes(url): http_client = DefaultHTTPClient() - client = ArangoClient(hosts="http://127.0.0.1:8529", http_client=http_client) + client = ArangoClient(hosts=url, http_client=http_client) assert client.version == importlib_metadata.version("python-arango") - assert client.hosts == ["http://127.0.0.1:8529"] + assert client.hosts == [url] - assert repr(client) == "" + assert repr(client) == f"" assert isinstance(client._host_resolver, SingleHostResolver) - client_repr = "" - client_hosts = ["http://127.0.0.1:8529", "http://localhost:8529"] + client_repr = f"" # noqa: E231 + client_hosts = [url, url] client = ArangoClient( - hosts="http://127.0.0.1:8529,http://localhost" ":8529", + hosts=f"{url},{url}", # noqa: E231 http_client=http_client, serializer=json.dumps, deserializer=json.loads, @@ -59,8 +59,8 @@ def test_client_attributes(): assert client.request_timeout == client._http.request_timeout == 120 -def test_client_good_connection(db, username, password): - client = ArangoClient(hosts="http://127.0.0.1:8529") +def test_client_good_connection(db, username, password, url): + client = ArangoClient(hosts=url) # Test connection with verify flag on and off for verify in (True, False): @@ -71,8 +71,8 @@ def test_client_good_connection(db, username, password): assert db.context == "default" -def test_client_bad_connection(db, username, password, cluster): - client = ArangoClient(hosts="http://127.0.0.1:8529") +def test_client_bad_connection(db, 
username, password, cluster, url): + client = ArangoClient(hosts=url) bad_db_name = generate_db_name() bad_username = generate_username() @@ -94,7 +94,7 @@ def test_client_bad_connection(db, username, password, cluster): assert "bad connection" in str(err.value) -def test_client_http_client_attributes(db, username, password): +def test_client_http_client_attributes(db, username, password, url): http_client = DefaultHTTPClient( request_timeout=80, retry_attempts=5, @@ -103,15 +103,13 @@ def test_client_http_client_attributes(db, username, password): pool_maxsize=12, pool_timeout=120, ) - client = ArangoClient( - hosts="http://127.0.0.1:8529", http_client=http_client, request_timeout=30 - ) + client = ArangoClient(hosts=url, http_client=http_client, request_timeout=30) client.db(db.name, username, password, verify=True) assert http_client.request_timeout == 80 assert client.request_timeout == http_client.request_timeout -def test_client_custom_http_client(db, username, password): +def test_client_custom_http_client(db, username, password, url): # Define custom HTTP client which increments the counter on any API call. class MyHTTPClient(DefaultHTTPClient): def __init__(self) -> None: @@ -127,13 +125,13 @@ def send_request( ) http_client = MyHTTPClient() - client = ArangoClient(hosts="http://127.0.0.1:8529", http_client=http_client) + client = ArangoClient(hosts=url, http_client=http_client) # Set verify to True to send a test API call on initialization. 
client.db(db.name, username, password, verify=True) assert http_client.counter == 1 -def test_client_override_validate() -> None: +def test_client_override_validate(url) -> None: # custom http client that manipulates the underlying session class MyHTTPClient(DefaultHTTPClient): def __init__(self, verify: Union[None, bool, str]) -> None: @@ -152,7 +150,7 @@ def assert_verify( ) -> None: http_client = MyHTTPClient(verify=http_client_verify) client = ArangoClient( - hosts="http://127.0.0.1:8529", + hosts=url, http_client=http_client, verify_override=arango_override, ) @@ -184,14 +182,14 @@ def assert_verify( assert_verify("test", "foo", "foo") -def test_can_serialize_deserialize_client() -> None: - client = ArangoClient(hosts="http://127.0.0.1:8529") +def test_can_serialize_deserialize_client(url) -> None: + client = ArangoClient(hosts=url) client_pstr = pickle.dumps(client) client2 = pickle.loads(client_pstr) assert len(client2._sessions) > 0 -def test_client_compression(db, username, password): +def test_client_compression(db, username, password, url): class CheckCompression: def __init__(self, should_compress: bool): self.should_compress = should_compress @@ -219,7 +217,7 @@ def send_request( # should not compress, as threshold is 0 client = ArangoClient( - hosts="http://127.0.0.1:8529", + hosts=url, http_client=MyHTTPClient(compression_checker=checker), response_compression="gzip", ) @@ -230,7 +228,7 @@ def send_request( # should not compress, as size of payload is less than threshold checker = CheckCompression(should_compress=False) client = ArangoClient( - hosts="http://127.0.0.1:8529", + hosts=url, http_client=MyHTTPClient(compression_checker=checker), request_compression=DeflateRequestCompression(250, level=7), response_compression="deflate", diff --git a/tests/test_collection.py b/tests/test_collection.py index c11a6541..c1b2c2f0 100644 --- a/tests/test_collection.py +++ b/tests/test_collection.py @@ -199,7 +199,8 @@ def test_collection_management(db, bad_db, 
cluster): assert key_options["key_generator"] == "autoincrement" assert key_options["key_increment"] == 9 assert key_options["key_offset"] == 100 - db.delete_collection(col_name) + + col_name = generate_col_name() col = db.create_collection( name=col_name, @@ -338,9 +339,9 @@ def test_collection_utf8(db, special_collection_names): # Not sure if this belongs in here or in `test_database.py`... def test_database_and_collection_utf8( - sys_db, special_collection_names, special_db_names + sys_db, special_collection_names, special_db_names, url ): - client = ArangoClient(hosts="http://127.0.0.1:8529") + client = ArangoClient(hosts=url) for db_name in special_db_names: username = generate_username() password = generate_string() diff --git a/tests/test_database.py b/tests/test_database.py index 1006df3b..1b9cf958 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -311,7 +311,7 @@ def test_database_misc_methods(client, sys_db, db, bad_db, cluster, secret, db_v assert isinstance(sys_db.reset_log_levels(server_id), dict) result = sys_db.reset_log_levels() - assert result == default_log_levels + assert result.keys() == default_log_levels.keys() with assert_raises(ServerLogLevelResetError): bad_db.reset_log_levels() @@ -459,7 +459,7 @@ def test_database_utf8(sys_db, special_db_names): assert sys_db.delete_database(name) -def test_license(sys_db, enterprise, db_version): +def test_license(sys_db, skip_tests, db_version): license = sys_db.license() assert isinstance(license, dict) @@ -474,7 +474,7 @@ def test_license(sys_db, enterprise, db_version): "status", } - if enterprise: + if "enterprise" not in skip_tests: assert set(license.keys()) == expected_keys else: assert license == {"license": "none"} diff --git a/tests/test_foxx.py b/tests/test_foxx.py index b096d2e8..892cfc60 100644 --- a/tests/test_foxx.py +++ b/tests/test_foxx.py @@ -29,16 +29,19 @@ from arango.foxx import Foxx from tests.helpers import assert_raises, extract, generate_service_mount 
-service_file = "/tests/static/service.zip" service_name = "test" -def test_foxx_attributes(db): +def test_foxx_attributes(db, skip_tests): + if "foxx" in skip_tests: + pytest.skip("Skipping foxx tests") assert isinstance(db.foxx, Foxx) assert repr(db.foxx) == f"" -def test_foxx_service_management_json(db, bad_db, cluster): +def test_foxx_service_management_json(db, bad_db, cluster, skip_tests, foxx_path): + if "foxx" in skip_tests: + pytest.skip("Skipping foxx tests") if cluster: pytest.skip("Not tested in a cluster setup") @@ -62,7 +65,7 @@ def test_foxx_service_management_json(db, bad_db, cluster): # Test create service service = db.foxx.create_service( mount=service_mount, - source=service_file, + source=foxx_path, config={}, dependencies={}, development=True, @@ -101,7 +104,7 @@ def test_foxx_service_management_json(db, bad_db, cluster): # Test update service service = db.foxx.update_service( mount=service_mount, - source=service_file, + source=foxx_path, config={}, dependencies={}, teardown=True, @@ -121,7 +124,7 @@ def test_foxx_service_management_json(db, bad_db, cluster): # Test replace service service = db.foxx.replace_service( mount=service_mount, - source=service_file, + source=foxx_path, config={}, dependencies={}, teardown=True, @@ -147,7 +150,9 @@ def test_foxx_service_management_json(db, bad_db, cluster): assert err.value.error_code == 3009 -def test_foxx_service_management_file(db, cluster): +def test_foxx_service_management_file(db, cluster, skip_tests): + if "foxx" in skip_tests: + pytest.skip("Skipping foxx tests") if cluster: pytest.skip("Not tested in a cluster setup") @@ -235,7 +240,9 @@ def test_foxx_service_management_file(db, cluster): assert service_mount not in extract("mount", db.foxx.services()) -def test_foxx_config_management(db, cluster): +def test_foxx_config_management(db, cluster, skip_tests, foxx_path): + if "foxx" in skip_tests: + pytest.skip("Skipping foxx tests") if cluster: pytest.skip("Not tested in a cluster setup") @@ 
-245,7 +252,7 @@ def test_foxx_config_management(db, cluster): # Prep the test service db.foxx.create_service( mount=service_mount, - source=service_file, + source=foxx_path, config={}, ) @@ -274,7 +281,9 @@ def test_foxx_config_management(db, cluster): assert err.value.error_code == 3009 -def test_foxx_dependency_management(db, cluster): +def test_foxx_dependency_management(db, cluster, skip_tests, foxx_path): + if "foxx" in skip_tests: + pytest.skip("Skipping foxx tests") if cluster: pytest.skip("Not tested in a cluster setup") @@ -282,7 +291,7 @@ def test_foxx_dependency_management(db, cluster): missing_mount = generate_service_mount() # Prep the test service - db.foxx.create_service(mount=service_mount, source=service_file, dependencies={}) + db.foxx.create_service(mount=service_mount, source=foxx_path, dependencies={}) # Test get service dependencies assert db.foxx.dependencies(service_mount) == {} @@ -309,7 +318,9 @@ def test_foxx_dependency_management(db, cluster): assert err.value.error_code == 3009 -def test_foxx_development_toggle(db, cluster): +def test_foxx_development_toggle(db, cluster, skip_tests, foxx_path): + if "foxx" in skip_tests: + pytest.skip("Skipping foxx tests") if cluster: pytest.skip("Not tested in a cluster setup") @@ -319,7 +330,7 @@ def test_foxx_development_toggle(db, cluster): # Prep the test service db.foxx.create_service( mount=service_mount, - source=service_file, + source=foxx_path, development=False, ) @@ -346,7 +357,9 @@ def test_foxx_development_toggle(db, cluster): assert err.value.error_code == 3009 -def test_foxx_misc_functions(db, bad_db, cluster): +def test_foxx_misc_functions(db, bad_db, cluster, skip_tests, foxx_path): + if "foxx" in skip_tests: + pytest.skip("Skipping foxx tests") if cluster: pytest.skip("Not tested in a cluster setup") @@ -356,7 +369,7 @@ def test_foxx_misc_functions(db, bad_db, cluster): # Prep the test service db.foxx.create_service( mount=service_mount, - source=service_file, + source=foxx_path, ) 
# Test get service readme diff --git a/tests/test_index.py b/tests/test_index.py index a5d0f5eb..04708d9d 100644 --- a/tests/test_index.py +++ b/tests/test_index.py @@ -250,7 +250,7 @@ def test_add_ttl_index(icol): icol.delete_index(result["id"]) -def test_add_inverted_index(icol, enterprise): +def test_add_inverted_index(icol, skip_tests): parameters = dict( fields=[{"name": "attr1", "cache": True}], name="c0_cached", @@ -261,7 +261,7 @@ def test_add_inverted_index(icol, enterprise): ) expected_keys = ["primarySort", "analyzer", "includeAllFields", "searchField"] - if enterprise: + if "enterprise" not in skip_tests: parameters["cache"] = True parameters["primaryKeyCache"] = True expected_keys.extend(["cache", "primaryKeyCache"]) diff --git a/tests/test_task.py b/tests/test_task.py index 85837a4e..9bfa48f6 100644 --- a/tests/test_task.py +++ b/tests/test_task.py @@ -1,3 +1,5 @@ +import pytest + from arango.exceptions import ( TaskCreateError, TaskDeleteError, @@ -7,7 +9,10 @@ from tests.helpers import assert_raises, extract, generate_task_id, generate_task_name -def test_task_management(sys_db, db, bad_db): +def test_task_management(sys_db, db, bad_db, skip_tests): + if "task" in skip_tests: + pytest.skip("Skipping task tests") + test_command = 'require("@arangodb").print(params);' # Test create task with random ID diff --git a/tests/test_transaction.py b/tests/test_transaction.py index 75ec28a2..15bf5e6d 100644 --- a/tests/test_transaction.py +++ b/tests/test_transaction.py @@ -13,7 +13,10 @@ from tests.helpers import extract, generate_db_name -def test_transaction_execute_raw(db, col, docs): +def test_transaction_execute_raw(db, col, docs, skip_tests): + if "js-transactions" in skip_tests: + pytest.skip("Skipping JS transaction tests") + # Test execute raw transaction doc = docs[0] key = doc["_key"] diff --git a/tests/test_view.py b/tests/test_view.py index 778f87e6..4aae2946 100644 --- a/tests/test_view.py +++ b/tests/test_view.py @@ -12,7 +12,7 @@ from 
tests.helpers import assert_raises, generate_view_name -def test_view_management(db, bad_db, col, cluster, db_version, enterprise): +def test_view_management(db, bad_db, col, cluster, db_version, skip_tests): # Test create view view_name = generate_view_name() bad_view_name = generate_view_name() @@ -124,7 +124,7 @@ def test_view_management(db, bad_db, col, cluster, db_version, enterprise): # Test delete missing view with ignore_missing set to True assert db.delete_view(view_name, ignore_missing=True) is False - if enterprise and db_version >= version.parse("3.12"): + if "enterprise" not in skip_tests and db_version >= version.parse("3.12"): res = db.create_view( view_name, view_type, @@ -194,11 +194,11 @@ def test_arangosearch_view_management(db, bad_db, cluster): assert db.delete_view(view_name, ignore_missing=False) is True -def test_arangosearch_view_properties(db, col, enterprise): +def test_arangosearch_view_properties(db, col, skip_tests): view_name = generate_view_name() params = {"consolidationIntervalMsec": 50000} - if enterprise: + if "enterprise" not in skip_tests: params.update( { "links": { @@ -221,7 +221,7 @@ def test_arangosearch_view_properties(db, col, enterprise): assert result["name"] == view_name assert result["type"].lower() == "arangosearch" - if enterprise: + if "enterprise" not in skip_tests: assert "links" in result assert col.name in result["links"] From c60f4ea3ef3bc39124ff6a91b1d94ccaeeeb6b17 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Mon, 22 Dec 2025 15:46:29 +0800 Subject: [PATCH 27/33] Access Tokens Support (#383) * Access tokens * Bump driver version --- arango/database.py | 86 ++++++++++++++++++++++++++++++++++++++++++++ arango/exceptions.py | 17 +++++++++ arango/request.py | 2 +- tests/helpers.py | 9 +++++ tests/test_auth.py | 56 ++++++++++++++++++++++++++++- 5 files changed, 168 insertions(+), 2 deletions(-) diff --git a/arango/database.py b/arango/database.py index 130b7b5a..48a032d7 100644 --- a/arango/database.py +++ 
b/arango/database.py @@ -19,6 +19,9 @@ from arango.connection import Connection from arango.errno import HTTP_NOT_FOUND from arango.exceptions import ( + AccessTokenCreateError, + AccessTokenDeleteError, + AccessTokenListError, AnalyzerCreateError, AnalyzerDeleteError, AnalyzerGetError, @@ -1158,6 +1161,89 @@ def response_handler(resp: Response) -> Json: return self._execute(request, response_handler) + def create_access_token( + self, + user: str, + name: str, + valid_until: int, + ) -> Result[Json]: + """Create an access token for the given user. + + :param user: The name of the user. + :type user: str + :param name: A name for the access token to make identification easier, + like a short description. + :type name: str + :param valid_until: A Unix timestamp in seconds to set the expiration + date and time. + :type valid_until: int + + :return: Information about the created access token, including the token itself. + :rtype: dict + + :raise arango.exceptions.AccessTokenCreateError: If the operations fails. + """ + data: Json = { + "name": name, + "valid_until": valid_until, + } + + request = Request( + method="post", + endpoint=f"/_api/token/{user}", + data=data, + ) + + def response_handler(resp: Response) -> Json: + if not resp.is_success: + raise AccessTokenCreateError(resp, request) + result: Json = resp.body + return result + + return self._executor.execute(request, response_handler) + + def delete_access_token(self, user: str, token_id: int) -> Result[None]: + """Delete an access token for the given user. + + :param user: The name of the user. + :type user: str + :param token_id: The ID of the access token to delete. + :type token_id: int + + :raise arango.exceptions.AccessTokenDeleteError: If the operation fails. 
+ """ + request = Request( + method="delete", + endpoint=f"/_api/token/{user}/{token_id}", + ) + + def response_handler(resp: Response) -> None: + if not resp.is_success: + raise AccessTokenDeleteError(resp, request) + + return self._executor.execute(request, response_handler) + + def list_access_tokens(self, user: str) -> Result[Jsons]: + """List all access tokens for the given user. + + :param user: The name of the user. + :type user: str + + :return: List of access tokens for the user. + :rtype: list + + :raise arango.exceptions.AccessTokenListError: If the operation fails. + """ + request = Request(method="get", endpoint=f"/_api/token/{user}") + + def response_handler(resp: Response) -> Jsons: + if not resp.is_success: + raise AccessTokenListError(resp, request) + result: Jsons = resp.body["tokens"] + return result + + return self._executor.execute(request, response_handler) + def tls(self) -> Result[Json]: """Return TLS data (server key, client-auth CA). diff --git a/arango/exceptions.py b/arango/exceptions.py index 7fc62983..77e00d39 100644 --- a/arango/exceptions.py +++ b/arango/exceptions.py @@ -161,6 +161,23 @@ class AQLQueryRulesGetError(ArangoServerError): """Failed to retrieve AQL query rules.""" +####################### +# Access Token Errors # +####################### + + +class AccessTokenCreateError(ArangoServerError): + """Failed to create an access token.""" + + +class AccessTokenDeleteError(ArangoServerError): + """Failed to delete an access token.""" + + +class AccessTokenListError(ArangoServerError): + """Failed to retrieve access tokens.""" + + ############################## # Async Execution Exceptions # ############################## diff --git a/arango/request.py b/arango/request.py index 5a213f68..66fb26ad 100644 --- a/arango/request.py +++ b/arango/request.py @@ -12,7 +12,7 @@ def normalize_headers( if driver_flags is not None: for flag in driver_flags: flags = flags + flag + ";" - driver_version = "8.2.4" + driver_version = "8.2.5" 
driver_header = "python-arango/" + driver_version + " (" + flags + ")" normalized_headers: Headers = { "charset": "utf-8", diff --git a/tests/helpers.py b/tests/helpers.py index ef25a786..b6fa76ce 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -108,6 +108,15 @@ def generate_service_mount(): return f"/test_{uuid4().hex}" +def generate_token_name(): + """Generate and return a random token name. + + :return: Random token name. + :rtype: str + """ + return f"test_token_{uuid4().hex}" + + def generate_jwt(secret, exp=3600): """Generate and return a JWT. diff --git a/tests/test_auth.py b/tests/test_auth.py index 0f747563..9a869512 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -1,6 +1,11 @@ +import time + from arango.connection import BasicConnection, JwtConnection, JwtSuperuserConnection from arango.errno import FORBIDDEN, HTTP_UNAUTHORIZED from arango.exceptions import ( + AccessTokenCreateError, + AccessTokenDeleteError, + AccessTokenListError, JWTAuthError, JWTExpiredError, JWTSecretListError, @@ -11,7 +16,12 @@ ServerTLSReloadError, ServerVersionError, ) -from tests.helpers import assert_raises, generate_jwt, generate_string +from tests.helpers import ( + assert_raises, + generate_jwt, + generate_string, + generate_token_name, +) def test_auth_invalid_method(client, db_name, username, password): @@ -155,3 +165,47 @@ def test_auth_jwt_expiry(client, db_name, root_password, secret): db = client.db("_system", user_token=valid_token) with assert_raises(JWTExpiredError) as err: db.conn.set_token(expired_token) + + +def test_auth_access_token(client, db_name, username, password, bad_db): + # Login with basic auth + db_auth_basic = client.db( + name=db_name, + username=username, + password=password, + verify=True, + auth_method="basic", + ) + + # Create an access token + token_name = generate_token_name() + token = db_auth_basic.create_access_token( + user=username, name=token_name, valid_until=int(time.time() + 3600) + ) + assert token["active"] is 
True + + # Cannot create a token with the same name + with assert_raises(AccessTokenCreateError): + db_auth_basic.create_access_token( + user=username, name=token_name, valid_until=int(time.time() + 3600) + ) + + # Authenticate with the created token + access_token_db = client.db( + name=db_name, + username=username, + password=token["token"], + verify=True, + auth_method="basic", + ) + + # List access tokens + tokens = access_token_db.list_access_tokens(username) + assert isinstance(tokens, list) + with assert_raises(AccessTokenListError): + bad_db.list_access_tokens(username) + + # Clean up + access_token_db.delete_access_token(username, token["id"]) + with assert_raises(AccessTokenDeleteError): + access_token_db.delete_access_token(username, token["id"]) From 629b3d8312cf6f598c614c7892262265d3e3605a Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Tue, 20 Jan 2026 14:54:13 +0800 Subject: [PATCH 28/33] No longer using setuptools and importlib as direct dependencies (#384) * No longer using setuptools and importlib as direct dependencies * Fixed version issues * Fixin version issues --- .gitignore | 3 --- .pre-commit-config.yaml | 4 ++-- arango/client.py | 6 ++---- arango/collection.py | 14 ++++---------- arango/request.py | 3 ++- arango/version.py | 1 + docs/requirements.txt | 1 - pyproject.toml | 11 ++++++----- tests/test_aql.py | 4 +--- tests/test_client.py | 8 ++++---- tests/test_cursor.py | 4 +--- 11 files changed, 23 insertions(+), 36 deletions(-) create mode 100644 arango/version.py diff --git a/.gitignore b/.gitignore index 4fa6f46d..3494dcdb 100644 --- a/.gitignore +++ b/.gitignore @@ -119,9 +119,6 @@ node_modules/ .envrc .direnv/ -# setuptools_scm -arango/version.py - # test results *_results.txt diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8098f23e..3da77633 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -18,7 +18,7 @@ repos: - id: trailing-whitespace - repo: https://github.com/psf/black - rev: 23.1.0 + 
rev: 26.1.0 hooks: - id: black @@ -29,7 +29,7 @@ repos: args: [ --profile, black ] - repo: https://github.com/PyCQA/flake8 - rev: 6.0.0 + rev: 7.3.0 hooks: - id: flake8 diff --git a/arango/client.py b/arango/client.py index b56755b0..627722e5 100644 --- a/arango/client.py +++ b/arango/client.py @@ -3,8 +3,6 @@ from json import dumps, loads from typing import Any, Callable, Optional, Sequence, Union -import importlib_metadata - from arango.connection import ( BasicConnection, Connection, @@ -27,6 +25,7 @@ RoundRobinHostResolver, SingleHostResolver, ) +from arango.version import __version__ def default_serializer(x: Any) -> str: @@ -175,8 +174,7 @@ def version(self) -> str: :return: Client version. :rtype: str """ - version: str = importlib_metadata.version("python-arango") - return version + return __version__ @property def request_timeout(self) -> Any: diff --git a/arango/collection.py b/arango/collection.py index 2b13884a..0f43f397 100644 --- a/arango/collection.py +++ b/arango/collection.py @@ -871,9 +871,7 @@ def find_near( query = """ FOR doc IN NEAR(@collection, @latitude, @longitude{}) RETURN doc - """.format( - "" if limit is None else ", @limit " - ) + """.format("" if limit is None else ", @limit ") bind_vars = { "collection": self._name, @@ -996,9 +994,7 @@ def find_in_radius( query = """ FOR doc IN WITHIN(@@collection, @latitude, @longitude, @radius{}) RETURN doc - """.format( - "" if distance_field is None else ", @distance" - ) + """.format("" if distance_field is None else ", @distance") bind_vars = { "@collection": self._name, @@ -1080,7 +1076,7 @@ def build_coord_str_from_index(index: Json) -> str: coord_str = "" if index is None: # Find the first geo index - for collection_index in self.indexes(): # type:ignore[union-attr] + for collection_index in self.indexes(): # type: ignore[union-attr] if collection_index["type"] == "geo": coord_str = build_coord_str_from_index(collection_index) break @@ -1168,9 +1164,7 @@ def find_by_text( aql = """ FOR doc 
IN FULLTEXT(@collection, @field, @query{}) RETURN doc - """.format( - "" if limit is None else ", @limit" - ) + """.format("" if limit is None else ", @limit") request = Request( method="post", diff --git a/arango/request.py b/arango/request.py index 66fb26ad..d9ddd83b 100644 --- a/arango/request.py +++ b/arango/request.py @@ -3,6 +3,7 @@ from typing import Any, MutableMapping, Optional from arango.typings import DriverFlags, Fields, Headers, Params +from arango.version import __version__ def normalize_headers( @@ -12,7 +13,7 @@ def normalize_headers( if driver_flags is not None: for flag in driver_flags: flags = flags + flag + ";" - driver_version = "8.2.5" + driver_version = __version__ driver_header = "python-arango/" + driver_version + " (" + flags + ")" normalized_headers: Headers = { "charset": "utf-8", diff --git a/arango/version.py b/arango/version.py new file mode 100644 index 00000000..a0c45555 --- /dev/null +++ b/arango/version.py @@ -0,0 +1 @@ +__version__ = "8.2.6" diff --git a/docs/requirements.txt b/docs/requirements.txt index 7d6d37d6..8f3bf12b 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,4 +1,3 @@ requests_toolbelt -importlib_metadata PyJWT sphinx_rtd_theme diff --git a/pyproject.toml b/pyproject.toml index c7bf486d..c4020c48 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,7 +18,7 @@ keywords = ["arangodb", "python", "driver"] readme = "README.md" dynamic = ["version"] license = { file = "LICENSE" } -requires-python = ">=3.9" +requires-python = ">=3.10" classifiers = [ "Intended Audience :: Developers", @@ -39,15 +39,16 @@ dependencies = [ "requests", "requests_toolbelt", "PyJWT", - "setuptools>=42", - "importlib_metadata>=4.7.1", "packaging>=23.1", ] +[tool.setuptools.dynamic] +version = { attr = "arango.version.__version__" } + [project.optional-dependencies] dev = [ - "black>=22.3.0", - "flake8>=4.0.1", + "black==26.1.0", + "flake8==7.3.0", "isort>=5.10.1", "mypy>=0.942", "mock", diff --git a/tests/test_aql.py 
b/tests/test_aql.py index 2672887c..b2defa01 100644 --- a/tests/test_aql.py +++ b/tests/test_aql.py @@ -110,9 +110,7 @@ def test_aql_query_management(db_version, db, sys_db, bad_db, col, docs): FOR d IN {col} UPDATE {{_key: d._key, _val: @val }} IN {col} RETURN NEW - """.format( - col=col.name - ), + """.format(col=col.name), count=True, # batch_size=1, ttl=10, diff --git a/tests/test_client.py b/tests/test_client.py index 4da61e47..22078d29 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -2,7 +2,6 @@ import pickle from typing import Union -import importlib_metadata import pytest from requests import Session @@ -11,6 +10,7 @@ from arango.exceptions import ArangoClientError, ServerConnectionError from arango.http import DefaultHTTPClient, DeflateRequestCompression from arango.resolver import FallbackHostResolver, RandomHostResolver, SingleHostResolver +from arango.version import __version__ from tests.helpers import ( generate_col_name, generate_db_name, @@ -23,7 +23,7 @@ def test_client_attributes(url): http_client = DefaultHTTPClient() client = ArangoClient(hosts=url, http_client=http_client) - assert client.version == importlib_metadata.version("python-arango") + assert client.version == __version__ assert client.hosts == [url] assert repr(client) == f"" @@ -38,7 +38,7 @@ def test_client_attributes(url): serializer=json.dumps, deserializer=json.loads, ) - assert client.version == importlib_metadata.version("python-arango") + assert client.version == __version__ assert client.hosts == client_hosts assert repr(client) == client_repr assert isinstance(client._host_resolver, FallbackHostResolver) @@ -50,7 +50,7 @@ def test_client_attributes(url): serializer=json.dumps, deserializer=json.loads, ) - assert client.version == importlib_metadata.version("python-arango") + assert client.version == __version__ assert client.hosts == client_hosts assert repr(client) == client_repr assert isinstance(client._host_resolver, RandomHostResolver) diff --git 
a/tests/test_cursor.py b/tests/test_cursor.py index 184d7ed8..6a0ad720 100644 --- a/tests/test_cursor.py +++ b/tests/test_cursor.py @@ -113,9 +113,7 @@ def test_cursor_write_query(db, col, docs): FOR d IN {col} FILTER d._key == @first OR d._key == @second UPDATE {{_key: d._key, _val: @val }} IN {col} RETURN NEW - """.format( - col=col.name - ), + """.format(col=col.name), bind_vars={"first": "1", "second": "2", "val": 42}, count=True, batch_size=1, From 91faf705f8e89ac1d42d365b86806a47bcfaaa19 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Sun, 1 Feb 2026 19:52:09 +0800 Subject: [PATCH 29/33] Running backup tests only in a cluster setup (#385) * Running backup tests only in a cluster setup * Fix mypy version * Suppress mypy warnings * Suppress mypy warnings --- .pre-commit-config.yaml | 2 +- arango/connection.py | 4 ++-- pyproject.toml | 7 ++++--- tests/test_backup.py | 9 ++++++++- 4 files changed, 15 insertions(+), 7 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3da77633..ebf33393 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -34,7 +34,7 @@ repos: - id: flake8 - repo: https://github.com/pre-commit/mirrors-mypy - rev: v0.991 + rev: v1.15.0 hooks: - id: mypy files: ^arango/ diff --git a/arango/connection.py b/arango/connection.py index 9384aef1..a8a16ef6 100644 --- a/arango/connection.py +++ b/arango/connection.py @@ -464,7 +464,7 @@ def set_token(self, token: str) -> None: "verify_iat": True, "verify_exp": True, "verify_signature": False, - }, + }, # type: ignore[arg-type] ) except ExpiredSignatureError: raise JWTExpiredError("JWT token is expired") @@ -554,7 +554,7 @@ def set_token(self, token: str) -> None: "verify_iat": True, "verify_exp": True, "verify_signature": False, - }, + }, # type: ignore[arg-type] ) except ExpiredSignatureError: raise JWTExpiredError("JWT token is expired") diff --git a/pyproject.toml b/pyproject.toml index c4020c48..3de11101 100644 --- a/pyproject.toml +++ 
b/pyproject.toml @@ -38,7 +38,7 @@ dependencies = [ "urllib3>=1.26.0", "requests", "requests_toolbelt", - "PyJWT", + "PyJWT>=2.10.0", "packaging>=23.1", ] @@ -49,8 +49,8 @@ version = { attr = "arango.version.__version__" } dev = [ "black==26.1.0", "flake8==7.3.0", - "isort>=5.10.1", - "mypy>=0.942", + "isort==5.10.1", + "mypy==1.15.0", "mock", "pre-commit>=2.17.0", "pytest>=7.1.1", @@ -90,5 +90,6 @@ profile = "black" [tool.mypy] warn_return_any = true warn_unused_configs = true +warn_unused_ignores = false ignore_missing_imports = true strict = true diff --git a/tests/test_backup.py b/tests/test_backup.py index 00470758..9b157024 100644 --- a/tests/test_backup.py +++ b/tests/test_backup.py @@ -1,4 +1,5 @@ import pytest +from packaging import version from arango.errno import DATABASE_NOT_FOUND, FILE_NOT_FOUND, FORBIDDEN, HTTP_NOT_FOUND from arango.exceptions import ( @@ -12,11 +13,17 @@ from tests.helpers import assert_raises -def test_backup_management(sys_db, bad_db, cluster, skip_tests): +def test_backup_management(sys_db, bad_db, cluster, skip_tests, db_version): if "enterprise" in skip_tests: pytest.skip("Only for ArangoDB enterprise edition") if "backup" in skip_tests: pytest.skip("Skipping backup tests") + if not cluster: + pytest.skip("For simplicity, the backup API is only tested in cluster setups") + if db_version < version.parse("3.12.0"): + pytest.skip( + "For simplicity, the backup API is only tested in the latest versions" + ) # Test create backup "foo". 
result = sys_db.backup.create( From 2655257331d4ff207162c43ba9e27205fda775b0 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Wed, 11 Feb 2026 10:24:00 +0800 Subject: [PATCH 30/33] Adding schema validation (#387) * Waiting for backup restore * Adding schema parameter validation * Updating docs * Updating schema validation * Updating error message * Fixing formatter * Bumping version number --- arango/collection.py | 2 ++ arango/database.py | 3 +++ arango/formatter.py | 2 +- arango/version.py | 2 +- docs/schema.rst | 5 +---- tests/test_backup.py | 5 +++++ tests/test_collection.py | 14 +++++++++++--- 7 files changed, 24 insertions(+), 9 deletions(-) diff --git a/arango/collection.py b/arango/collection.py index 0f43f397..c62840a9 100644 --- a/arango/collection.py +++ b/arango/collection.py @@ -400,6 +400,8 @@ def configure( if sync is not None: data["waitForSync"] = sync if schema is not None: + if not isinstance(schema, dict) or len(schema) == 0: + raise ValueError("schema parameter must be a non-empty dict") data["schema"] = schema if replication_factor is not None: data["replicationFactor"] = replication_factor diff --git a/arango/database.py b/arango/database.py index 48a032d7..2ba1129b 100644 --- a/arango/database.py +++ b/arango/database.py @@ -1668,6 +1668,7 @@ def create_collection( :return: Standard collection API wrapper. :rtype: arango.collection.StandardCollection :raise arango.exceptions.CollectionCreateError: If create fails. + :raise ValueError: If parameters are invalid. 
""" key_options: Json = {"type": key_generator, "allowUserKeys": user_keys} if key_generator == "autoincrement": @@ -1698,6 +1699,8 @@ def create_collection( if write_concern is not None: data["writeConcern"] = write_concern if schema is not None: + if not isinstance(schema, dict) or len(schema) == 0: + raise ValueError("schema parameter must be a non-empty dict") data["schema"] = schema if computedValues is not None: data["computedValues"] = computedValues diff --git a/arango/formatter.py b/arango/formatter.py index ebbb070c..eeb50e77 100644 --- a/arango/formatter.py +++ b/arango/formatter.py @@ -1112,7 +1112,7 @@ def format_backup_dbserver(body: Json) -> Json: :return: Formatted body. :rtype: dict """ - return {"status": body["Status"]} + return {"status": body.get("Status")} def format_backup_transfer(body: Json) -> Json: diff --git a/arango/version.py b/arango/version.py index a0c45555..ec400e65 100644 --- a/arango/version.py +++ b/arango/version.py @@ -1 +1 @@ -__version__ = "8.2.6" +__version__ = "8.3.0" diff --git a/docs/schema.rst b/docs/schema.rst index e56943e5..fdfeccfc 100644 --- a/docs/schema.rst +++ b/docs/schema.rst @@ -3,7 +3,7 @@ Schema Validation ArangoDB supports document validation using JSON schemas. You can use this feature by providing a schema during collection creation using the ``schema`` -parameter. +parameter. It must not be an empty ``dict```. **Example:** @@ -37,6 +37,3 @@ parameter. # Modify the schema. employees.configure(schema=my_schema) - - # Remove the schema. 
- employees.configure(schema={}) diff --git a/tests/test_backup.py b/tests/test_backup.py index 9b157024..1fea7ecd 100644 --- a/tests/test_backup.py +++ b/tests/test_backup.py @@ -1,3 +1,5 @@ +import time + import pytest from packaging import version @@ -110,6 +112,9 @@ def test_backup_management(sys_db, bad_db, cluster, skip_tests, db_version): result = sys_db.backup.restore(backup_id_foo) assert isinstance(result, dict) + # Wait for restore to complete + time.sleep(10) + # Test restore backup with bad database. with assert_raises(BackupRestoreError) as err: bad_db.backup.restore(backup_id_foo) diff --git a/tests/test_collection.py b/tests/test_collection.py index c1b2c2f0..0f0f2d3a 100644 --- a/tests/test_collection.py +++ b/tests/test_collection.py @@ -60,9 +60,13 @@ def test_collection_misc_methods(col, bad_col, cluster): } ] - properties = col.configure( - sync=not prev_sync, schema={}, computed_values=computed_values - ) + with pytest.raises(ValueError): + # schema must not be empty + properties = col.configure( + sync=not prev_sync, schema={}, computed_values=computed_values + ) + + properties = col.configure(sync=not prev_sync, computed_values=computed_values) assert properties["name"] == col.name assert properties["system"] is False @@ -202,6 +206,10 @@ def test_collection_management(db, bad_db, cluster): col_name = generate_col_name() + with pytest.raises(ValueError): + # schema must not be empty + db.create_collection(name=col_name, schema={}) + col = db.create_collection( name=col_name, sync=True, From c85aa1590d155f56f8a6605244aee63fb5b37dff Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Fri, 13 Feb 2026 22:04:28 +0800 Subject: [PATCH 31/33] Emitting a warning instead of crashing (#388) * Emitting a warning instead of crashing * Adding driver option --- tests/conftest.py | 59 ++++++++++++++++++++++++++++++----------------- 1 file changed, 38 insertions(+), 21 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index a6068056..1483ce91 
100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,4 @@ +import warnings from dataclasses import dataclass import pytest @@ -45,6 +46,7 @@ class GlobalData: secret: str = None root_password: str = None db_version: version = version.parse("0.0.0") + crash: bool = False global_data = GlobalData() @@ -69,6 +71,11 @@ def pytest_addoption(parser): parser.addoption( "--cluster", action="store_true", help="Run tests in a cluster setup" ) + parser.addoption( + "--crash", + action="store_true", + help="Crashes the tests on API keyword (for debugging)", + ) parser.addoption( "--complete", action="store_true", @@ -201,6 +208,7 @@ def pytest_configure(config): global_data.skip = config.getoption("skip") global_data.backup_path = config.getoption("backup_path") global_data.foxx_path = config.getoption("foxx_path") + global_data.crash = config.getoption("crash") # noinspection PyShadowingNames @@ -283,27 +291,6 @@ def pytest_generate_tests(metafunc): metafunc.parametrize("bad_db", bad_dbs) -@pytest.fixture(autouse=True) -def mock_formatters(monkeypatch): - def mock_verify_format(body, result): - body.pop("error", None) - body.pop("code", None) - result.pop("edge", None) - - # Remove all None values - # Sometimes they are expected to be excluded from the body (see computedValues) - result = {k: v for k, v in result.items() if v is not None} - body = {k: v for k, v in body.items() if v is not None} - - if len(body) != len(result): - before = sorted(body, key=lambda x: x.strip("_")) - after = sorted(result, key=lambda x: x.strip("_")) - raise ValueError(f"\nIN: {before}\nOUT: {after}") - return result - - monkeypatch.setattr(formatter, "verify_format", mock_verify_format) - - @pytest.fixture(autouse=False) def db_version(): return global_data.db_version @@ -489,3 +476,33 @@ def foxx_path(): @pytest.fixture def skip_tests(): return global_data.skip + + +@pytest.fixture +def crash_tests(): + return global_data.crash + + +@pytest.fixture(autouse=True) +def 
mock_formatters(monkeypatch, crash_tests): + def mock_verify_format(body, result): + body.pop("error", None) + body.pop("code", None) + result.pop("edge", None) + + # Remove all None values + # Sometimes they are expected to be excluded from the body (see computedValues) + result = {k: v for k, v in result.items() if v is not None} + body = {k: v for k, v in body.items() if v is not None} + + if len(body) != len(result): + before = sorted(body, key=lambda x: x.strip("_")) + after = sorted(result, key=lambda x: x.strip("_")) + if crash_tests: + raise ValueError(f"\nIN: {before}\nOUT: {after}") + else: + warnings.warn(f"\nIN: {before}\nOUT: {after}") + + return result + + monkeypatch.setattr(formatter, "verify_format", mock_verify_format) From eb89d69af058ef7cc2c69715d493f585cc5ba669 Mon Sep 17 00:00:00 2001 From: Wilfried Goesgens Date: Sun, 1 Mar 2026 16:25:18 +0100 Subject: [PATCH 32/33] Bug fix/wait for hot backup (#389) * workaround: give the cluster time to restore the hot backup * implement waiting for cluster resillience after hotbackup restore * Adapting the wait function * Adapting the wait function * Adapting the wait function * Adapting the wait function --------- Co-authored-by: Alex Petenchea --- tests/test_backup.py | 41 +++++++++++++++++++++++++++++++++++++++-- 1 file changed, 39 insertions(+), 2 deletions(-) diff --git a/tests/test_backup.py b/tests/test_backup.py index 1fea7ecd..150b9e16 100644 --- a/tests/test_backup.py +++ b/tests/test_backup.py @@ -11,10 +11,48 @@ BackupGetError, BackupRestoreError, BackupUploadError, + ReplicationClusterInventoryError, ) from tests.helpers import assert_raises +def wait_for_cluster_resilient(sys_db): + collections_in_sync = False + max_attempts = 100 + + while not collections_in_sync and max_attempts > 0: + count_in_sync = 0 + count_still_waiting = 0 + + try: + inventory = sys_db.replication.cluster_inventory(include_system=True) + except ReplicationClusterInventoryError: + print("Failed to get cluster 
inventory, retrying...") + time.sleep(1) + max_attempts -= 1 + continue + + collections_in_sync = True + for col in inventory["collections"]: + if not col["all_in_sync"]: + count_still_waiting += 1 + collections_in_sync = False + else: + count_in_sync += 1 + + if not collections_in_sync: + if max_attempts % 50 == 0: + print(inventory) + print(f"In sync: {count_in_sync}") + print(f"Still not in sync: {count_still_waiting}") + time.sleep(1) + + max_attempts -= 1 + + if not collections_in_sync: + raise Exception("Collections didn't come in sync!") + + def test_backup_management(sys_db, bad_db, cluster, skip_tests, db_version): if "enterprise" in skip_tests: pytest.skip("Only for ArangoDB enterprise edition") @@ -112,8 +150,7 @@ def test_backup_management(sys_db, bad_db, cluster, skip_tests, db_version): result = sys_db.backup.restore(backup_id_foo) assert isinstance(result, dict) - # Wait for restore to complete - time.sleep(10) + wait_for_cluster_resilient(sys_db) # Test restore backup with bad database. 
with assert_raises(BackupRestoreError) as err: From 88b59eb89fb43148e796d33f001b11f67cac3ff4 Mon Sep 17 00:00:00 2001 From: Alex Petenchea Date: Mon, 2 Mar 2026 15:37:52 +0800 Subject: [PATCH 33/33] Prevent failing test after compaction (#390) * Prevent failing test after compaction * Fixed issues --- tests/test_collection.py | 32 ++++++++++++++++++++++++++------ 1 file changed, 26 insertions(+), 6 deletions(-) diff --git a/tests/test_collection.py b/tests/test_collection.py index 0f0f2d3a..6ac6dcad 100644 --- a/tests/test_collection.py +++ b/tests/test_collection.py @@ -1,3 +1,5 @@ +import time + import pytest from arango.client import ArangoClient @@ -196,13 +198,31 @@ def test_collection_management(db, bad_db, cluster): } ] - col = db.create_collection( - name=col_name, key_generator="autoincrement", key_increment=9, key_offset=100 + col = None + for _ in range(10): + try: + col = db.create_collection( + name=col_name, + key_generator="autoincrement", + key_increment=9, + key_offset=100, + ) + except CollectionCreateError: + print( + "Failed to create collection with autoincrement key generator, " + "retrying..." + ) + time.sleep(3) + continue + key_options = col.properties()["key_options"] + assert key_options["key_generator"] == "autoincrement" + assert key_options["key_increment"] == 9 + assert key_options["key_offset"] == 100 + break + assert col is not None, ( + "Failed to create collection with autoincrement " + "key generator after multiple attempts" ) - key_options = col.properties()["key_options"] - assert key_options["key_generator"] == "autoincrement" - assert key_options["key_increment"] == 9 - assert key_options["key_offset"] == 100 col_name = generate_col_name()