Fixed unique-constrain-violated behaviour in SkiplistIndex · jameswei/arangodb@c35ccc0 · GitHub
[go: up one dir, main page]

Skip to content

Commit c35ccc0

Browse files
committed
Fixed unique-constrain-violated behaviour in SkiplistIndex
1 parent e0983ef commit c35ccc0

File tree

2 files changed

+140
-0
lines changed

2 files changed

+140
-0
lines changed

arangod/Indexes/SkiplistIndex.cpp

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -882,6 +882,12 @@ int SkiplistIndex::insert (TRI_doc_mptr_t const* doc,
882882
for (size_t i = 0; i < count; ++i) {
883883
res = _skiplistIndex->insert(elements[i]);
884884

885+
if (res == TRI_ERROR_ARANGO_UNIQUE_CONSTRAINT_VIOLATED &&
886+
! _unique) {
887+
// We ignore unique_constraint violated if we are not unique
888+
res = TRI_ERROR_NO_ERROR;
889+
}
890+
885891
if (res != TRI_ERROR_NO_ERROR) {
886892
TRI_index_element_t::free(elements[i]);
887893
// Note: this element is freed already

js/server/tests/shell-array-index-noncluster.js

Lines changed: 134 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -160,6 +160,140 @@ function arrayHashIndexSuite () {
160160
res = collection.BY_EXAMPLE_HASH(idx, {a: 2, b: "b"}, 0, null).documents;
161161
assertEqual(res.length, 1);
162162
assertEqual(res[0]._id, id);
163+
164+
// It should be possible to insert arbitrary null values
165+
166+
var id1 = collection.save({a: ["duplicate", null, "duplicate"], b: ["duplicate", null, "duplicate"]})._id;
167+
var id2 = collection.save({a: ["duplicate", null, "duplicate"], b: ["duplicate", null, "duplicate"]})._id;
168+
var id3 = collection.save({a: ["duplicate", null, "duplicate"], b: ["duplicate", null, "duplicate"]})._id;
169+
var ids = [id1, id2, id3].sort();
170+
res = collection.BY_EXAMPLE_HASH(idx, {a: "duplicate", b: "duplicate"}, 0, null).documents;
171+
res = res.map(function(r) { return r._id; }).sort();
172+
assertEqual(res.length, 3);
173+
assertEqual(res, ids);
174+
175+
res = collection.BY_EXAMPLE_HASH(idx, {a: "duplicate", b: null}, 0, null).documents;
176+
res = res.map(function(r) { return r._id; }).sort();
177+
assertEqual(res.length, 3);
178+
assertEqual(res, ids);
179+
180+
res = collection.BY_EXAMPLE_HASH(idx, {a: null, b: "duplicate"}, 0, null).documents;
181+
res = res.map(function(r) { return r._id; }).sort();
182+
assertEqual(res.length, 3);
183+
assertEqual(res, ids);
184+
185+
res = collection.BY_EXAMPLE_HASH(idx, {a: null, b: null}, 0, null).documents;
186+
res = res.map(function(r) { return r._id; }).sort();
187+
assertEqual(res.length, 3);
188+
assertEqual(res, ids);
189+
},
190+
191+
////////////////////////////////////////////////////////////////////////////////
192+
/// @brief test: Multiple identical elements in sparse array
193+
////////////////////////////////////////////////////////////////////////////////
194+
195+
testInsertAndReadArrayCombinedSparse : function () {
196+
var idx = collection.ensureHashIndex("a[*]", "b[*]", {sparse: true}).id;
197+
198+
var id = collection.save({a: [1, 2], b: ["a", "b"]})._id;
199+
200+
// All combinations should be in the index.
201+
var res = collection.BY_EXAMPLE_HASH(idx, {a: 1, b: "a"}, 0, null).documents;
202+
assertEqual(res.length, 1);
203+
assertEqual(res[0]._id, id);
204+
205+
res = collection.BY_EXAMPLE_HASH(idx, {a: 2, b: "a"}, 0, null).documents;
206+
assertEqual(res.length, 1);
207+
assertEqual(res[0]._id, id);
208+
209+
res = collection.BY_EXAMPLE_HASH(idx, {a: 1, b: "b"}, 0, null).documents;
210+
assertEqual(res.length, 1);
211+
assertEqual(res[0]._id, id);
212+
213+
res = collection.BY_EXAMPLE_HASH(idx, {a: 2, b: "b"}, 0, null).documents;
214+
assertEqual(res.length, 1);
215+
assertEqual(res[0]._id, id);
216+
217+
// It should be possible to insert arbitrary null values
218+
219+
var id1 = collection.save({a: ["duplicate", null, "duplicate"], b: ["duplicate", null, "duplicate"]})._id;
220+
var id2 = collection.save({a: ["duplicate", null, "duplicate"], b: ["duplicate", null, "duplicate"]})._id;
221+
var id3 = collection.save({a: ["duplicate", null, "duplicate"], b: ["duplicate", null, "duplicate"]})._id;
222+
var ids = [id1, id2, id3].sort();
223+
res = collection.BY_EXAMPLE_HASH(idx, {a: "duplicate", b: "duplicate"}, 0, null).documents;
224+
res = res.map(function(r) { return r._id; }).sort();
225+
assertEqual(res.length, 3);
226+
assertEqual(res, ids);
227+
228+
res = collection.BY_EXAMPLE_HASH(idx, {a: "duplicate", b: null}, 0, null).documents;
229+
assertEqual(res.length, 0);
230+
231+
res = collection.BY_EXAMPLE_HASH(idx, {a: null, b: "duplicate"}, 0, null).documents;
232+
assertEqual(res.length, 0);
233+
234+
res = collection.BY_EXAMPLE_HASH(idx, {a: null, b: null}, 0, null).documents;
235+
assertEqual(res.length, 0);
236+
},
237+
238+
////////////////////////////////////////////////////////////////////////////////
239+
/// @brief test: Multiple identical elements in unique array
240+
////////////////////////////////////////////////////////////////////////////////
241+
242+
testInsertAndReadArrayCombinedUnique : function () {
243+
var idx = collection.ensureHashIndex("a[*]", "b[*]", {unique: true}).id;
244+
245+
var id = collection.save({a: [1, 2], b: ["a", "b"]})._id;
246+
247+
// All combinations should be in the index.
248+
var res = collection.BY_EXAMPLE_HASH(idx, {a: 1, b: "a"}, 0, null).documents;
249+
assertEqual(res.length, 1);
250+
assertEqual(res[0]._id, id);
251+
252+
res = collection.BY_EXAMPLE_HASH(idx, {a: 2, b: "a"}, 0, null).documents;
253+
assertEqual(res.length, 1);
254+
assertEqual(res[0]._id, id);
255+
256+
res = collection.BY_EXAMPLE_HASH(idx, {a: 1, b: "b"}, 0, null).documents;
257+
assertEqual(res.length, 1);
258+
assertEqual(res[0]._id, id);
259+
260+
res = collection.BY_EXAMPLE_HASH(idx, {a: 2, b: "b"}, 0, null).documents;
261+
assertEqual(res.length, 1);
262+
assertEqual(res[0]._id, id);
263+
264+
// It should be possible to insert arbitrary null values
265+
266+
// This should be insertable
267+
var id1 = collection.save({a: ["duplicate", null, "duplicate"], b: ["duplicate", null, "duplicate"]})._id;
268+
269+
try {
270+
// This should not be insertable; we already inserted the one before
271+
collection.save({a: ["duplicate", null, "duplicate"], b: ["duplicate", null, "duplicate"]});
272+
fail();
273+
} catch (e) {
274+
assertEqual(e.errorNum, errors.ERROR_ARANGO_UNIQUE_CONSTRAINT_VIOLATED.code);
275+
}
276+
277+
var ids = [id1];
278+
res = collection.BY_EXAMPLE_HASH(idx, {a: "duplicate", b: "duplicate"}, 0, null).documents;
279+
res = res.map(function(r) { return r._id; }).sort();
280+
assertEqual(res.length, 1);
281+
assertEqual(res, ids);
282+
283+
res = collection.BY_EXAMPLE_HASH(idx, {a: "duplicate", b: null}, 0, null).documents;
284+
res = res.map(function(r) { return r._id; }).sort();
285+
assertEqual(res.length, 1);
286+
assertEqual(res, ids);
287+
288+
res = collection.BY_EXAMPLE_HASH(idx, {a: null, b: "duplicate"}, 0, null).documents;
289+
res = res.map(function(r) { return r._id; }).sort();
290+
assertEqual(res.length, 1);
291+
assertEqual(res, ids);
292+
293+
res = collection.BY_EXAMPLE_HASH(idx, {a: null, b: null}, 0, null).documents;
294+
res = res.map(function(r) { return r._id; }).sort();
295+
assertEqual(res.length, 1);
296+
assertEqual(res, ids);
163297
},
164298

165299
////////////////////////////////////////////////////////////////////////////////

0 commit comments

Comments (0)