Merge pull request #241 from ArangoDB-Community/dev
Dev to master
tariqdaouda authored Apr 14, 2023
2 parents 9350478 + 74624bc commit 2640184
Showing 7 changed files with 151 additions and 88 deletions.
8 changes: 7 additions & 1 deletion CHANGELOG.rst
@@ -1,7 +1,14 @@
2.0.2
=====
* Fixed contains functions
* Added UniqueConstrainViolation exception, which inherits from CreationError (see the sketch below)

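A minimal sketch of catching the new exception; the ``users`` collection and its unique index are assumed:

    from pyArango.theExceptions import UniqueConstrainViolation, CreationError

    try:
        doc = users.createDocument({"email": "jane@example.com"})
        doc.save()
    except UniqueConstrainViolation as err:
        # raised on a unique-index violation; also catchable as CreationError
        print("duplicate:", err)
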
2.0.1
=====

* Fixed max retries for write conflicts
* Added parameter ``pool_maxsize`` on class ``Connection`` to let users configure the HTTP pool size; see the sketch below.

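A usage sketch for ``pool_maxsize``; the URL and credentials below are placeholders:

    from pyArango.connection import Connection

    conn = Connection(
        arangoURL="http://127.0.0.1:8529",
        username="root",
        password="",
        pool_maxsize=50,  # size of the underlying HTTP connection pool
    )
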
2.0
=====
@@ -10,7 +17,6 @@
* added to_default function to reset a document to its default values
* fixed bug in default documents where default values could be overwritten
* default value for fields is now None
* default value for fields can now be a callable

1.3.5
=====
63 changes: 28 additions & 35 deletions pyArango/collection.py
@@ -272,6 +272,9 @@ def getDefaultDocument(self, fields=None, dct=None):
for k, v in fields.items():
if isinstance(v, dict):
dct[k] = self.getDefaultDocument(fields[k], None)
elif isinstance(v, (list, tuple)):
dct[k] = []

elif isinstance(v, Field):
if callable(v.default):
dct[k] = v.default()
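
A sketch of the defaults this hunk handles; the collection and field names are assumed, and ``Field`` takes a ``default`` that may be a plain value or a callable (list/tuple field specs now default to an empty list):

    import time
    from pyArango.collection import Collection, Field

    class Users(Collection):
        _fields = {
            "status": Field(default="active"),    # static default
            "created": Field(default=time.time),  # callable, invoked per document
        }
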
@@ -338,7 +341,7 @@ def _writeBatch(self):
raise UpdateError("Mixed bulk operations not supported - have " + str(self._bulkMode))
payload = []
for d in self._bulkCache:
if type(d) is dict:
if isinstance(d, dict):
payload.append(json.dumps(d, default=str))
else:
try:
@@ -355,15 +358,15 @@ def _writeBatch(self):
bulkError = None
for xd in data:
if '_key' not in xd and 'error' in xd and 'errorNum' in xd:
if bulkError == None:
if bulkError is None:
bulkError = BulkOperationError("saving failed")
bulkError.addBulkError(ArangoError(xd), self._bulkCache[i])
else:
self._bulkCache[i].setPrivates(xd)
self._bulkCache[i]._key = \
xd['_key']
i += 1
if bulkError != None:
if bulkError is not None:
self._bulkCache = []
raise bulkError

@@ -391,7 +394,7 @@ def _updateBatch(self):
if d.collection._validation['on_save']:
d.validate()

if type(d) is dict:
if isinstance(d, dict):
payload.append(json.dumps(d, default=str))
else:
try:
@@ -407,7 +410,7 @@ def _updateBatch(self):
bulkError = None
for xd in data:
if '_key' not in xd and 'error' in xd and 'errorNum' in xd:
if bulkError == None:
if bulkError is None:
bulkError = BulkOperationError("patching failed")
bulkError.addBulkError(ArangoError(xd), str(self._bulkCache[i]))
else:
@@ -416,7 +419,7 @@ def _updateBatch(self):
xd['_key']
i += 1
self._bulkCache = []
if bulkError != None:
if bulkError is not None:
raise bulkError


@@ -437,7 +440,7 @@ def _removeBatch(self):
raise UpdateError("Mixed bulk operations not supported - have " + self._bulkMode)
payload = []
for d in self._bulkCache:
if type(d) is dict:
if isinstance(d, dict):
payload.append('"%s"' % d['_key'])
else:
try:
@@ -454,14 +457,14 @@ def _removeBatch(self):
bulkError = None
for xd in data:
if '_key' not in xd and 'error' in xd and 'errorNum' in xd:
if bulkError == None:
if bulkError is None:
bulkError = BulkOperationError("deleting failed")
bulkError.addBulkError(ArangoError(xd), self._bulkCache[i])
else:
self._bulkCache[i].reset(self)
i += 1
self._bulkCache = []
if bulkError != None:
if bulkError is not None:
raise bulkError

def _deleteBatch(self, document, params):
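
These batch helpers back pyArango's BulkOperation context manager; a hedged sketch, assuming an existing ``users`` collection:

    from pyArango.collection import BulkOperation

    with BulkOperation(users, batchSize=100) as col:
        for i in range(1000):
            doc = col.createDocument({"value": i})
            doc.save()  # buffered and flushed in batches of 100
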
@@ -674,8 +677,7 @@ def fetchDocument(self, key, rawResults = False, rev = None):
return self.documentClass(self, r.json(), on_load_validation=self._validation["on_load"])
elif r.status_code == 404 :
raise DocumentNotFoundError("Unable to find document with _key: %s" % key, r.json())
else:
raise DocumentNotFoundError("Unable to find document with _key: %s, response: %s" % (key, r.json()), r.json())
raise DocumentNotFoundError("Unable to find document with _key: %s, response: %s" % (key, r.json()), r.json())

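A usage sketch for the rewritten error path; the key is a placeholder:

    from pyArango.theExceptions import DocumentNotFoundError

    try:
        doc = users.fetchDocument("12345")
    except DocumentNotFoundError as err:
        print("not found:", err)
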
def fetchByExample(self, exampleDict, batchSize, rawResults = False, **queryArgs):
"""exampleDict should be something like {'age' : 28}"""
@@ -689,7 +691,7 @@ def fetchFirstExample(self, exampleDict, rawResults = False):
def fetchAll(self, rawResults = False, **queryArgs):
"""Returns all the documents in the collection. You can use the optinal arguments 'skip' and 'limit'::
fetchAlll(limit = 3, shik = 10)"""
fetchAll(limit = 3, skip = 10)"""
return self.simpleQuery('all', rawResults = rawResults, **queryArgs)

def simpleQuery(self, queryType, rawResults = False, **queryArgs):
@@ -711,7 +713,7 @@ def bulkSave(self, docs, onDuplicate="error", **params):

payload = []
for d in docs:
if type(d) is dict:
if isinstance(d, dict):
payload.append(json.dumps(d, default=str))
else:
try:
@@ -730,11 +732,10 @@ def bulkSave(self, docs, onDuplicate="error", **params):
data = r.json()
if (r.status_code == 201) and "error" not in data:
return True
else:
if "errors" in data and data["errors"] > 0:
raise UpdateError("%d documents could not be created" % data["errors"], data)
elif data["error"]:
raise UpdateError("Documents could not be created", data)
if "errors" in data and data["errors"] > 0:
raise UpdateError("%d documents could not be created" % data["errors"], data)
elif data["error"]:
raise UpdateError("Documents could not be created", data)

return data["updated"] + data["created"]

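A hedged usage sketch; ``users`` is an assumed collection:

    docs = [{"name": "a"}, {"name": "b"}, {"name": "c"}]
    written = users.bulkSave(docs, onDuplicate="update")
    print("documents written:", written)  # updated + created
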
@@ -749,10 +750,8 @@ def bulkImport_json(self, filename, onDuplicate="error", formatType="auto", **params):
data = f.read()
r = self.connection.session.post(url, params = params, data = data)

try:
errorMessage = "At least: %d errors. The first one is: '%s'\n\n more in <this_exception>.data" % (len(data), data[0]["errorMessage"])
except KeyError:
raise UpdateError(data['errorMessage'], data)
if r.status_code != 201:
raise UpdateError('Unable to bulk import JSON', r)

def bulkImport_values(self, filename, onDuplicate="error", **params):
"""bulk import from a file repecting arango's json format"""
@@ -764,10 +763,8 @@ def bulkImport_values(self, filename, onDuplicate="error", **params):
data = f.read()
r = self.connection.session.post(url, params = params, data = data)

try:
errorMessage = "At least: %d errors. The first one is: '%s'\n\n more in <this_exception>.data" % (len(data), data[0]["errorMessage"])
except KeyError:
raise UpdateError(data['errorMessage'], data)
if r.status_code != 201:
raise UpdateError('Unable to bulk import values', r)

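A sketch of the two import helpers; the file paths are placeholders and the files must already be in ArangoDB's bulk-import formats:

    users.bulkImport_json("users.json", onDuplicate="update")
    users.bulkImport_values("users_values.json", onDuplicate="ignore")
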
def truncate(self):
"""deletes every document in the collection"""
@@ -811,8 +808,7 @@ def getType(self):
return "document"
elif self.type == CONST.COLLECTION_EDGE_TYPE:
return "edge"
else:
raise ValueError("The collection is of Unknown type %s" % self.type)
raise ValueError("The collection is of Unknown type %s" % self.type)

def getStatus(self):
"""returns a word describing the status of the collection (loaded, loading, deleted, unloaded, newborn) instead of a number, if you prefer the number it's in self.status"""
@@ -826,8 +822,7 @@ def getStatus(self):
return "unloaded"
elif self.status == CONST.COLLECTION_NEWBORN_STATUS:
return "newborn"
else:
raise ValueError("The collection has an Unknown status %s" % self.status)
raise ValueError("The collection has an Unknown status %s" % self.status)

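A quick sketch of the two helpers above; ``users`` is an assumed collection:

    print(users.getType())    # "document" or "edge"
    print(users.getStatus())  # e.g. "loaded"
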
def __len__(self):
"""returns the number of documents in the collection"""
@@ -881,8 +876,7 @@ def validateField(cls, fieldName, value):
except SchemaViolation as e:
if fieldName == "_from" or fieldName == "_to":
return True
else:
raise e
raise e
return valValue

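A sketch of single-field validation, reusing the assumed ``Users`` class from above; a bad value raises SchemaViolation, except for ``_from``/``_to``:

    value = Users.validateField("status", "active")  # returns the validated value
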
def createEdge(self, initValues = None):
@@ -902,7 +896,7 @@ def getEdges(self, vertex, inEdges = True, outEdges = True, rawResults = False):
If rawResults is True, the raw ArangoDB results will be returned as fetched; if False, a list of Edge objects will be returned."""
if isinstance(vertex, Document):
vId = vertex._id
elif (type(vertex) is str) or (type(vertex) is bytes):
elif isinstance(vertex, (str, bytes)):
vId = vertex
else:
raise ValueError("Vertex is neither a Document nor a String")
@@ -925,8 +919,7 @@ def getEdges(self, vertex, inEdges = True, outEdges = True, rawResults = False):
for e in data["edges"]:
ret.append(Edge(self, e))
return ret
else:
return data["edges"]
return data["edges"]
else:
raise CreationError("Unable to return edges for vertex: %s" % vId, data)

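A usage sketch; the edge collection and vertex document are assumed:

    edges = friendships.getEdges(someUserDoc, inEdges=True, outEdges=False)
    for e in edges:
        print(e._from, "->", e._to)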
