Adds storage info to export state
crystalin committed Nov 16, 2023
1 parent d2a6a09 commit fefce98
Showing 1 changed file with 76 additions and 27 deletions.
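
In short: besides streaming the raw state into the exported JSON file, the tool now records per-storage-item statistics in the companion `.info.json` metadata file. A rough sketch of the resulting metadata shape, inferred from the JSON.stringify call in the diff below (the interface names are illustrative, not part of the commit):

// Sketch only — shape inferred from the diff; interface names are illustrative.
interface StorageInfoEntry {
  name: string;      // "<section>.<method>" of the storage item, e.g. "system.account"
  prefix: string;    // hex key prefix of the storage item
  count: string;     // number of exported keys under the prefix (bigint serialized as string)
  size?: string;     // total value bytes under the prefix (bigint serialized as string)
  keySize?: number;  // byte length of the last key seen under the prefix
}

interface ExportStateInfo {
  file: string;      // basename of the exported state file
  name: string;      // chain name (system_chain)
  chainId: string;   // net_version
  blockHash: string;
  blockNumber: number;
  runtime: unknown;  // state_getRuntimeVersion result
  storages: StorageInfoEntry[];
}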
103 changes: 76 additions & 27 deletions src/tools/export-state.ts
@@ -3,7 +3,7 @@ import fs from "fs";
 import path from "path";
 import "@polkadot/api-augment";
 import "@moonbeam-network/api-augment";
-import { getWsProviderFor, NETWORK_YARGS_OPTIONS } from "../utils/networks";
+import { getApiFor, getWsProviderFor, NETWORK_YARGS_OPTIONS } from "../utils/networks";
 import { hexToNumber } from "@polkadot/util";
 import { processAllStorage } from "../utils/storage";
 import moment from "moment";
@@ -14,8 +14,8 @@ const argv = yargs(process.argv.slice(2))
   .options({
     ...NETWORK_YARGS_OPTIONS,
     "at-block": { type: "number", demandOption: false },
-    "concurrency": { type: "number", demandOption: false, default: 10 },
-    "delay": { type: "number", demandOption: false, default: 0 },
+    concurrency: { type: "number", demandOption: false, default: 10 },
+    delay: { type: "number", demandOption: false, default: 0 },
     "raw-spec": { type: "string", demandOption: true },
     "path-prefix": { type: "string", demandOption: true },
   }).argv;
@@ -24,37 +24,26 @@ async function main() {
   const ws = await getWsProviderFor(argv);
   await ws.isReady;
 
-  const now = moment().format('YYYY-MM-DD');
+  const now = moment().format("YYYY-MM-DD");
 
   const atBlock =
     argv["at-block"] || hexToNumber((await ws.send("chain_getBlock", [])).block.header.number);
 
   const concurrency = argv.concurrency || 10;
   const delay = argv.delay;
-  console.log(`${now}: Exporting at block ${atBlock} using ${ws.endpoint} and ${concurrency} threads (+${delay}ms delay)`);
+  console.log(
+    `${now}: Exporting at block ${atBlock} using ${ws.endpoint} and ${concurrency} threads (+${delay}ms delay)`
+  );
 
-  const chainName = await ws.send("system_chain", [])
+  const chainName = await ws.send("system_chain", []);
   const blockHash = await ws.send("chain_getBlockHash", [atBlock]);
   const runtimeVersion = await ws.send("state_getRuntimeVersion", [blockHash]);
   const chainId = await ws.send("net_version", [blockHash]);
 
-
   const filename = `${argv["path-prefix"]}-${now}.json`;
   const metaFilename = `${argv["path-prefix"]}-${now}.info.json`;
 
   const file = fs.createWriteStream(filename, "utf8");
-
-  fs.writeFileSync(
-    metaFilename,
-    JSON.stringify({
-      "file": path.basename(filename),
-      "name": chainName,
-      "chainId": chainId,
-      "blockHash": blockHash,
-      "blockNumber": atBlock,
-      "runtime": runtimeVersion,
-    }, null, 2),
-    "utf8");
   const rawSpec = JSON.parse(fs.readFileSync(argv["raw-spec"], "utf8"));
   rawSpec["bootNodes"] = [];
   rawSpec["telemetryEndpoints"] = [];
@@ -80,11 +69,42 @@
         break;
       }
     }
+
+    const storageNames: { [prefix: string]: string } = {};
+    const storageCount: { [prefix: string]: bigint } = {};
+    const storageKeySize: { [prefix: string]: number } = {};
+    const storageSize: { [prefix: string]: bigint } = {};
+
+    const api = await getApiFor(argv);
+    for (const section of Object.keys(api.query)) {
+      for (const method of Object.keys(api.query[section])) {
+        const prefix = api.query[section][method].keyPrefix();
+        storageNames[prefix] = `${section}.${method}`;
+        storageCount[prefix] = 0n;
+        storageSize[prefix] = 0n;
+      }
+    }
+    api.disconnect();
+
+    const storagePrefixes = Object.keys(storageNames);
     let total = 0;
-    await processAllStorage(ws, { prefix: "0x", blockHash, splitDepth: 2, concurrency, delayMS: delay }, (batchResult) => {
-      total += batchResult.length;
-      file.write(batchResult.map((c) => ` "${c.key}": "${c.value}",\n`).join(""));
-    });
+    await processAllStorage(
+      ws,
+      { prefix: "0x", blockHash, splitDepth: 2, concurrency, delayMS: delay },
+      (batchResult) => {
+        total += batchResult.length;
+        file.write(batchResult.map((c) => ` "${c.key}": "${c.value}",\n`).join(""));
+        for (const line of batchResult) {
+          for (const storagePrefix of storagePrefixes) {
+            if (line.key.startsWith(storagePrefix)) {
+              storageCount[storagePrefix] += 1n;
+              storageKeySize[storagePrefix] = line.key.length / 2 - 1;
+              storageSize[storagePrefix] += BigInt(line.value.length / 2) - 1n;
+            }
+          }
+        }
+      }
+    );
     file.write(` \n`);
     while (true) {
       const line = rawSpecLines.shift();
@@ -96,14 +116,43 @@
     const t1 = performance.now();
     const duration = t1 - t0;
     const qps = total / (duration / 1000);
-    console.log(`Written ${total} keys in ${moment.duration(duration / 1000, "seconds").humanize()}: ${qps.toFixed(0)} keys/sec`);
+
+    fs.writeFileSync(
+      metaFilename,
+      JSON.stringify(
+        {
+          file: path.basename(filename),
+          name: chainName,
+          chainId: chainId,
+          blockHash: blockHash,
+          blockNumber: atBlock,
+          runtime: runtimeVersion,
+          storages: Object.keys(storageNames).map((prefix) => {
+            return {
+              name: storageNames[prefix],
+              prefix: prefix,
+              count: storageCount[prefix].toString(),
+              size: storageSize[prefix]?.toString(),
+              keySize: storageKeySize[prefix],
+            };
+          }),
+        },
+        null,
+        2
+      ),
+      "utf8"
+    );
+
+    console.log(
+      `Written ${total} keys in ${moment
+        .duration(duration / 1000, "seconds")
+        .humanize()}: ${qps.toFixed(0)} keys/sec`
+    );
   } catch (e) {
     console.log("ERROR:");
     console.log(e);
     console.trace(e);
-  }
-  finally {
-
+  } finally {
     file.close();
     await ws.disconnect();
   }
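
Side note (not part of the commit): the per-prefix statistics above are derived from the 0x-prefixed hex strings returned by the RPC, so a string of length n encodes n / 2 - 1 bytes. A minimal illustration:

// Illustration only: byte length of a 0x-prefixed hex string,
// matching the keySize/size arithmetic in the diff above.
const hexByteLength = (hex: string): number => hex.length / 2 - 1;
// e.g. hexByteLength("0x1234") === 2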
