-
Notifications
You must be signed in to change notification settings - Fork 9
/
Copy pathraw-insert.js
131 lines (120 loc) · 3.97 KB
/
raw-insert.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
// Benchmark harness setup: load env config, parse CLI args, and create a
// shared MongoDB client. Usage: node raw-insert.js <endpoint> <loops>
const path = require('path');
const dotenv = require('dotenv-safe');
const { MongoClient } = require('mongodb');
const _ = require('lodash');
const Bulker = require('./bulk'); // project-local batching helper
dotenv.config({
  path: path.join(__dirname, '.env'),
  // NOTE(review): current dotenv-safe versions call this option `example`,
  // not `sample` — confirm against the pinned dotenv-safe version.
  sample: path.join(__dirname, '.env.example'),
});
// argv[2]: which benchmark variant to run (see switch in run());
// argv[3]: total number of documents to insert.
const ENDPOINT = process.argv[2];
const NUMBER_OF_LOOP = parseInt(process.argv[3], 10);
const MONGO_URI = process.env.MONGO_URI;
const MONGO_DB_NAME = process.env.MONGO_DB_NAME;
// Tunables for the two Bulker instances; `|| default` also applies when the
// env var parses to 0 or NaN.
const INSERT_BULK_TIMEOUT =
  parseInt(process.env.INSERT_BULK_TIMEOUT, 10) || 1000;
const INSERT_BULK_SIZE = parseInt(process.env.INSERT_BULK_SIZE, 10) || 1000;
const INSERT_GROUP_TIMEOUT =
  parseInt(process.env.INSERT_GROUP_TIMEOUT, 10) || 100000;
const INSERT_GROUP_SIZE = parseInt(process.env.INSERT_GROUP_SIZE, 10) || 100000;
const INSERT_GROUP_CHUNK_SIZE =
  parseInt(process.env.INSERT_GROUP_CHUNK_SIZE, 10) || 100;
const mongoClient = new MongoClient(MONGO_URI, {
  useUnifiedTopology: true,
  maxPoolSize: 100, // Set equal to concurrent request to avoid waiting for connection
  maxIdleTimeMS: 10000,
  // loggerLevel: 'debug',
});
// Obtained before connect(); valid with the unified-topology driver, where
// operations queue until the connection is established.
const db = mongoClient.db(MONGO_DB_NAME);
// Module-level count of completed single-document inserts (see insert()).
let counter = 0;
// Buffers documents and flushes them to the `logs` collection with a single
// insertMany() call whenever INSERT_BULK_SIZE items have accumulated or
// INSERT_BULK_TIMEOUT ms have elapsed (flush policy lives in ./bulk).
const insertBulker = new Bulker(
  INSERT_BULK_SIZE,
  INSERT_BULK_TIMEOUT,
  async (batch) => {
    await db.collection('logs').insertMany(batch);
  }
);
// Buffers documents and flushes them as individual insertOne() calls, keeping
// at most INSERT_GROUP_CHUNK_SIZE requests in flight at a time. Flush fires at
// INSERT_GROUP_SIZE items or after INSERT_GROUP_TIMEOUT ms (see ./bulk).
const groupBulker = new Bulker(
  INSERT_GROUP_SIZE,
  INSERT_GROUP_TIMEOUT,
  async (items) => {
    let chunk = [];
    const start = new Date();
    for (const item of items) {
      chunk.push(db.collection('logs').insertOne(item));
      // Flush when the chunk is actually full. The original tested
      // `i % CHUNK === 0`, which awaited a 1-element chunk at i=0 and made
      // every subsequent chunk off-by-one.
      if (chunk.length === INSERT_GROUP_CHUNK_SIZE) {
        await Promise.all(chunk);
        chunk = [];
      }
    }
    // BUG FIX: the original never awaited the trailing partial chunk, leaving
    // those insertOne() promises floating (possible unhandled rejections) and
    // logging the elapsed time before all inserts had finished.
    if (chunk.length > 0) {
      await Promise.all(chunk);
    }
    console.log(`Group taked ${new Date() - start} ms`);
  }
);
/**
 * Run the benchmark variant selected by ENDPOINT, inserting NUMBER_OF_LOOP
 * copies of a fixed test document into the `logs` collection.
 * Variants: insert_sync (sequential awaited inserts), insert_async (all
 * inserts fired concurrently), insert_group (groupBulker), insert_bulk
 * (insertBulker, polled for completion then process.exit()).
 */
async function run() {
  const start = new Date();

  // Insert one document; when the module-level counter reaches
  // NUMBER_OF_LOOP, print total elapsed time and throughput.
  async function insert(body) {
    // Original mixed `await` with `.then()`; plain await is equivalent here.
    await db.collection('logs').insertOne(body);
    counter += 1;
    if (counter === NUMBER_OF_LOOP) {
      const taked = new Date() - start;
      console.log(
        `Taked ${taked} ms, avg ${(NUMBER_OF_LOOP / taked) * 1000} / s`
      );
    }
  }

  // Fresh copy of the fixed payload for every insert (deduplicates the
  // literal that was repeated in each case of the original switch).
  const makeDoc = () => ({
    title: 'My awesome test',
    description: 'This is a test',
  });

  switch (ENDPOINT) {
    case 'insert_sync':
      // One round trip at a time.
      for (let i = 0; i < NUMBER_OF_LOOP; i++) {
        await insert(makeDoc());
      }
      break;
    case 'insert_async':
      // Fire everything concurrently; insert() itself reports completion
      // via the shared counter. Deliberately not awaited.
      for (let i = 0; i < NUMBER_OF_LOOP; i++) {
        insert(makeDoc());
      }
      break;
    case 'insert_group':
      for (let i = 0; i < NUMBER_OF_LOOP; i++) {
        groupBulker.push(makeDoc());
      }
      break;
    case 'insert_bulk':
      for (let i = 0; i < NUMBER_OF_LOOP; i++) {
        insertBulker.push(makeDoc());
      }
      // Poll until the bulker has flushed every item, then report and exit.
      setInterval(() => {
        if (insertBulker.getCounter() === NUMBER_OF_LOOP) {
          const taked = new Date() - start;
          console.log(
            `Taked ${taked} ms, avg ${(NUMBER_OF_LOOP / taked) * 1000} / s`
          );
          process.exit();
        }
      }, 50);
      // BUG FIX: the original omitted this break and fell through into
      // `default` (harmless today, but fragile if default ever gains logic).
      break;
    default:
      break;
  }
}
mongoClient.connect().then(run);