Rdb index background (preliminary) by graetzer · Pull Request #7644 · arangodb/arangodb · GitHub

Rdb index background (preliminary) #7644


Merged
39 commits merged into devel from feature/rdb-index-background on Dec 21, 2018

Changes from 1 commit

Commits (39)
3f6967c
Initial commit
graetzer Dec 3, 2018
38b3500
make sure index is hidden
graetzer Dec 3, 2018
0ddb321
Merge branch 'devel' of github.com:arangodb/arangodb into feature/rdb…
graetzer Dec 4, 2018
9bcc737
last changes
graetzer Dec 4, 2018
c57de4f
fix a bug
graetzer Dec 4, 2018
c798357
reduce conflicts
graetzer Dec 5, 2018
4766900
fix background indexing
graetzer Dec 6, 2018
b525712
remove unused code
graetzer Dec 6, 2018
0515650
fix link creation
graetzer Dec 6, 2018
70ef2f1
fix unique constraint violations
graetzer Dec 6, 2018
e6d23c1
fixed arangosearch cluster reporting
graetzer Dec 6, 2018
13bcd44
added test
graetzer Dec 6, 2018
9e5f960
fix test
graetzer Dec 6, 2018
6bfd840
make noncluster for now
graetzer Dec 6, 2018
3d20521
fix jslint
graetzer Dec 7, 2018
38005e0
Some test adjustments.
Dec 10, 2018
dc041fb
Merge branch 'devel' into feature/rdb-index-background
Dec 10, 2018
a7ae28a
Fix merge error.
Dec 10, 2018
db46a40
changes
graetzer Dec 10, 2018
9db5709
Merge branch 'feature/rdb-index-background' of github.com:arangodb/ar…
graetzer Dec 10, 2018
ef4bb05
adding inBackground flag
graetzer Dec 11, 2018
5546745
Merge branch 'devel' into feature/rdb-index-background
Dec 11, 2018
2ebf591
Fix merge errors.
Dec 11, 2018
2082f78
adding some docs
graetzer Dec 12, 2018
3efc632
Merge branch 'devel' into feature/rdb-index-background
Dec 12, 2018
98ddd16
Some small changes.
Dec 12, 2018
ee51fa5
Fixed removal bug and added test.
Dec 12, 2018
422e0ce
Added update test.
Dec 17, 2018
31d9d7b
Merge branch 'devel' of github.com:arangodb/arangodb into feature/rdb…
graetzer Dec 18, 2018
a34f928
forgot to comment out docs
graetzer Dec 18, 2018
021453c
fixing some code
graetzer Dec 19, 2018
f61b5e9
fix jslint
graetzer Dec 19, 2018
b013b15
remove some code
graetzer Dec 19, 2018
82f4b6e
fix reporting of unfinished indexes
graetzer Dec 20, 2018
b7d6a9d
Merge branch 'devel' of github.com:arangodb/arangodb into feature/rdb…
graetzer Dec 20, 2018
2f1cc80
fixing fillIndex for iresearch
graetzer Dec 21, 2018
371c738
revert a change
graetzer Dec 21, 2018
7f4e2e7
Merge branch 'devel' of github.com:arangodb/arangodb into feature/rdb…
graetzer Dec 21, 2018
3f46816
fixng a deadlock
graetzer Dec 21, 2018
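The commits above build up to non-blocking (background) index creation for the RocksDB engine. As a rough usage sketch, assuming the `inBackground` flag added in commit ef4bb05 is accepted directly by `ensureIndex()` (the collection and field names below are purely illustrative):

```js
// Minimal sketch of the feature this PR implements (not taken from the diff):
// build a hash index without blocking writes on the collection.
// inBackground is the flag named in commit ef4bb05; everything else here
// is an illustrative example.
const db = require("internal").db;
const demo = db._create("IllustrativeCollection");
demo.ensureIndex({
  type: "hash",
  fields: ["value"],
  unique: false,
  inBackground: true  // create the index in the background
});
```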
added test
graetzer committed Dec 6, 2018
commit 13bcd447204d9bcb55cd36b475b42d5e39d57f69
201 changes: 161 additions & 40 deletions tests/js/common/shell/shell-index-rocksdb.js
@@ -28,10 +28,10 @@
/// @author Copyright 2012, triAGENS GmbH, Cologne, Germany
////////////////////////////////////////////////////////////////////////////////

var jsunity = require("jsunity");
var internal = require("internal");
var errors = internal.errors;
var testHelper = require("@arangodb/test-helper").Helper;
const jsunity = require("jsunity");
const internal = require("internal");
const errors = internal.errors;
const db = internal.db;

function backgroundIndexSuite() {
'use strict';
@@ -41,8 +41,8 @@ function backgroundIndexSuite() {
return {

setUp : function () {
internal.db._drop(cn);
internal.db._create(cn);
db._drop(cn);
db._create(cn);
},

tearDown : function () {
@@ -55,14 +55,26 @@ function backgroundIndexSuite() {
}
}
});
internal.db._drop(cn);
db._drop(cn);
},

testInsertInParallel: function () {
let n = 10;
testInsertParallelNonUnique: function () {
let c = require("internal").db._collection(cn);
// first lets add some initial documents
let x = 10;
while(x-- > 0) {
let docs = [];
for(let i = 0; i < 1000; i++) {
docs.push({value:i})
}
c.save(docs);
}

// lets insert the rest via tasks
let n = 9;
for (let i = 0; i < n; ++i) {
let command = `let c = require("internal").db._collection("${cn}");
let x = 25; while(x-- > 0) {
let x = 10; while(x-- > 0) {
let docs = [];
for(let i = 0; i < 1000; i++) {
docs.push({value:i})
@@ -72,57 +72,166 @@ function backgroundIndexSuite() {
tasks.register({ name: "UnitTestsIndexInsert" + i, command: command });
}

// create the index on the main thread
c.ensureIndex({type: 'hash', fields: ['value'], unique: false});

let time = require("internal").time;
let start = time();
while (true) {
let indexes = require("internal").db._collection(cn).getIndexes();
if (indexes.length === n + 1) {
// primary index + user-defined indexes
if (c.count() === 100000) {
break;
}
if (time() - start > 180) {
// wait for 3 minutes maximum
fail("Timeout creating 80 indices after 3 minutes: " + JSON.stringify(indexes));
if (time() - start > 180) { // wait for 3 minutes maximum
fail("Timeout creating documents after 3 minutes");
}
require("internal").wait(0.5, false);
}

let indexes = require("internal").db._collection(cn).getIndexes();
assertEqual(n + 1, indexes.length);

// each value in [0,999] appears 100 times (100,000 docs / 1,000 distinct values)
for (let i = 0; i < 1000; i++) {
let cursor = db._query("FOR doc IN @@coll FILTER doc.value == @val RETURN 1",
{'@coll': cn, 'val': i}, {count:true});
assertEqual(cursor.count(), 100);
}

const estimate = 1000.0 / 100000.0;

let indexes = c.getIndexes(true);
for (let i of indexes) {
switch (i.type) {
case 'primary':
break;
case 'hash':
assertEqual(i.selectivityEstimate, estimate);
break;
default:
fail();
}
}
},

testCreateInParallelDuplicate: function () {
let n = 100;
for (let i = 0; i < n; ++i) {
let command = 'require("internal").db._collection("' + cn + '").ensureIndex({ type: "hash", fields: ["value' + (i % 4) + '"] });';
tasks.register({ name: "UnitTestsIndexCreate" + i, command: command });
testInsertParallelUnique: function () {
let c = require("internal").db._collection(cn);
// first lets add some initial documents
let x = 0;
while(x < 10000) {
let docs = [];
for(let i = 0; i < 1000; i++) {
docs.push({value: x++})
}
c.save(docs);
}

// lets insert the rest via tasks
for (let i = 1; i < 5; ++i) {
let command = `let c = require("internal").db._collection("${cn}");
let x = ${i} * 10000;
while(x < ${i + 1} * 10000) {
let docs = [];
for(let i = 0; i < 1000; i++) {
docs.push({value: x++})
}
c.save(docs);
}`;
tasks.register({ name: "UnitTestsIndexInsert" + i, command: command });
}

// create the index on the main thread
c.ensureIndex({type: 'hash', fields: ['value'], unique: true});

let time = require("internal").time;
let start = time();
while (true) {
let indexes = require("internal").db._collection(cn).getIndexes();
if (indexes.length === 4 + 1) {
// primary index + user-defined indexes
if (c.count() === 50000) {
break;
}
if (time() - start > 180) {
// wait for 3 minutes maximum
fail("Timeout creating indices after 3 minutes: " + JSON.stringify(indexes));
if (time() - start > 300) { // wait for 5 minutes maximum
fail("Timeout creating documents after 5 minutes: " + c.count());
}
require("internal").wait(0.5, false);
}

// wait some extra time because we just have 4 distinct indexes
// these will be created relatively quickly. by waiting here a bit
// we give the other pending tasks a chance to execute too (but they
// will not do anything because the target indexes already exist)
require("internal").wait(5, false);

let indexes = require("internal").db._collection(cn).getIndexes();
assertEqual(4 + 1, indexes.length);
}

// each value in [0,50000) appears exactly once
for (let i = 0; i < 50000; i++) {
let cursor = db._query("FOR doc IN @@coll FILTER doc.value == @val RETURN 1",
{'@coll': cn, 'val': i}, {count:true});
assertEqual(cursor.count(), 1);
}

let indexes = c.getIndexes(true);
for (let i of indexes) {
switch (i.type) {
case 'primary':
break;
case 'hash':
assertEqual(i.selectivityEstimate, 1.0);
break;
default:
fail();
}
}
},

testInsertParallelUniqueConstraintViolation: function () {
let c = require("internal").db._collection(cn);
// first lets add some initial documents
let x = 0;
while(x < 10000) {
let docs = [];
for(let i = 0; i < 1000; i++) {
docs.push({value: x++})
}
c.save(docs);
}

// lets insert the rest via tasks
for (let i = 1; i < 5; ++i) {
let command = `let c = require("internal").db._collection("${cn}");
let x = ${i} * 10000;
while(x < ${i + 1} * 10000) {
let docs = [];
for(let i = 0; i < 1000; i++) {
docs.push({value: x++})
}
c.save(docs);
}`;
tasks.register({ name: "UnitTestsIndexInsert" + i, command: command });
}

c.save({value: 1 }); // now trigger a conflict
//tasks.register({ name: "UnitTestsIndexInsert6" + i, command: `require("internal").db._collection("${cn}").save({value: 1 });` });

try {
// create the index on the main thread
c.ensureIndex({type: 'hash', fields: ['value'], unique: true});
fail();
} catch(err) {
assertEqual(errors.ERROR_ARANGO_UNIQUE_CONSTRAINT_VIOLATED.code, err.errorNum);
}

let time = require("internal").time;
let start = time();
while (true) {
if (c.count() === 50000) {
break;
}
if (time() - start > 300) { // wait for 5 minutes maximum
fail("Timeout creating documents after 5 minutes: " + c.count());
}
require("internal").wait(0.5, false);
}

let indexes = c.getIndexes();
for (let i of indexes) {
switch (i.type) {
case 'primary':
break;
case 'hash':
default:
fail();
}
}
}
};
}
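A note on the pattern the new tests share: each one registers background tasks via `tasks.register()` that insert documents while the index is created on the main thread, then polls `c.count()` until all writers have finished before checking the results. The selectivity assertions follow from the data: 1,000 distinct values over 100,000 documents gives an estimate of 1000/100000 = 0.01 in the non-unique test, while the unique index reports 1.0. The polling loop could be factored into a helper; a sketch only, assuming it runs inside the same jsunity suite (the helper name is invented and not part of this commit):

```js
// Illustrative sketch, not part of the diff: the "wait until the collection
// reaches the expected document count, or fail after a timeout" loop that
// all three tests repeat. fail() is jsunity's assertion helper, available
// inside the test suite.
function waitForCount(collection, expected, timeoutSecs) {
  const internal = require("internal");
  const start = internal.time();
  while (collection.count() !== expected) {
    if (internal.time() - start > timeoutSecs) {
      fail("Timeout: expected " + expected + " documents, have " + collection.count());
    }
    internal.wait(0.5, false); // sleep half a second between polls
  }
}
```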
