Adding a couple of tests. · JavaScriptExpert/simdjson@84dc398 · GitHub

Commit 84dc398

Adding a couple of tests.

1 parent f6a3205 commit 84dc398

1 file changed: +61, -0 lines

benchmark/bench_dom_api.cpp

Lines changed: 61 additions & 0 deletions
@@ -11,7 +11,68 @@ const padded_string EMPTY_ARRAY("[]", 2);
 const char *TWITTER_JSON = SIMDJSON_BENCHMARK_DATA_DIR "twitter.json";
 const char *NUMBERS_JSON = SIMDJSON_BENCHMARK_DATA_DIR "numbers.json";
 
+static void recover_one_string(State& state) {
+  dom::parser parser;
+  const std::string_view data = "\"one string\"";
+  padded_string docdata{data};
+  // We do not want memory allocation in the loop.
+  auto error = parser.allocate(docdata.size());
+  if (error) {
+    cerr << error << endl;
+    return;
+  }
+  dom::element doc;
+  if ((error = parser.parse(docdata).get(doc))) {
+    cerr << "could not parse string: " << error << endl;
+    return;
+  }
+  for (UNUSED auto _ : state) {
+    std::string_view v;
+    error = doc.get(v);
+    if (error) {
+      cerr << "could not get string: " << error << endl;
+      return;
+    }
+    benchmark::DoNotOptimize(v);
+  }
+}
+BENCHMARK(recover_one_string);
+
 
+static void serialize_twitter(State& state) {
+  dom::parser parser;
+  padded_string docdata;
+  auto error = padded_string::load(TWITTER_JSON).get(docdata);
+  if (error) {
+    cerr << "could not load twitter.json: " << error << endl;
+    return;
+  }
+  // We do not want memory allocation in the loop.
+  error = parser.allocate(docdata.size());
+  if (error) {
+    cerr << error << endl;
+    return;
+  }
+  dom::element doc;
+  if ((error = parser.parse(docdata).get(doc))) {
+    cerr << "could not parse twitter.json: " << error << endl;
+    return;
+  }
+  size_t bytes = 0;
+  for (UNUSED auto _ : state) {
+    std::string serial = simdjson::minify(doc);
+    bytes += serial.size();
+    benchmark::DoNotOptimize(serial);
+  }
+  // Gigabyte: https://en.wikipedia.org/wiki/Gigabyte
+  state.counters["Gigabytes"] = benchmark::Counter(
+      double(bytes), benchmark::Counter::kIsRate,
+      benchmark::Counter::OneK::kIs1000); // For GiB: kIs1024
+  state.counters["docs"] = Counter(double(state.iterations()), benchmark::Counter::kIsRate);
+}
+BENCHMARK(serialize_twitter)->Repetitions(10)->ComputeStatistics("max", [](const std::vector<double>& v) -> double {
+  return *(std::max_element(std::begin(v), std::end(v)));
+})->DisplayAggregatesOnly(true);
 
 static void numbers_scan(State& state) {
   // Prints the number of results in twitter.json
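For readers who want to try the same DOM calls outside of the Google Benchmark harness, here is a minimal standalone sketch. It is illustrative only: it assumes simdjson is installed, uses a placeholder "twitter.json" path rather than the SIMDJSON_BENCHMARK_DATA_DIR constant, and drops the benchmark loop; the allocate-free parse/get/minify sequence mirrors the two benchmarks above.

// Minimal standalone sketch of the DOM API calls exercised by the two
// benchmarks above (no Google Benchmark harness). The "twitter.json"
// path is a placeholder, not part of this commit.
#include <iostream>
#include <string>
#include <string_view>
#include "simdjson.h"

int main() {
  using namespace simdjson;
  dom::parser parser;

  // 1. Parse a single JSON string and recover it as a std::string_view.
  const std::string_view data = "\"one string\"";
  padded_string docdata{data};
  dom::element doc;
  auto error = parser.parse(docdata).get(doc);
  if (error) { std::cerr << error << std::endl; return 1; }
  std::string_view v;
  if ((error = doc.get(v))) { std::cerr << error << std::endl; return 1; }
  std::cout << "recovered: " << v << std::endl;

  // 2. Load a larger document and serialize (minify) it back to a string.
  padded_string twitter;
  if ((error = padded_string::load("twitter.json").get(twitter))) {  // placeholder path
    std::cerr << error << std::endl;
    return 1;
  }
  dom::element tweets;
  if ((error = parser.parse(twitter).get(tweets))) { std::cerr << error << std::endl; return 1; }
  std::string serial = simdjson::minify(tweets);
  std::cout << "minified to " << serial.size() << " bytes" << std::endl;
  return 0;
}

In the committed benchmarks, these same calls run inside the for (UNUSED auto _ : state) loop: benchmark::DoNotOptimize keeps the compiler from eliding the work, and the kIsRate counter turns the accumulated byte count into a bytes-per-second (reported as Gigabytes) figure.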
