diff --git a/packages/beacon-node/src/chain/archiveStore/archiveStore.ts b/packages/beacon-node/src/chain/archiveStore/archiveStore.ts
index a792ee046c0d..9446d18b2b25 100644
--- a/packages/beacon-node/src/chain/archiveStore/archiveStore.ts
+++ b/packages/beacon-node/src/chain/archiveStore/archiveStore.ts
@@ -2,7 +2,7 @@ import {CheckpointWithHex} from "@lodestar/fork-choice";
 import {LoggerNode} from "@lodestar/logger/node";
 import {Checkpoint} from "@lodestar/types/phase0";
 import {callFnWhenAwait} from "@lodestar/utils";
-import {IBeaconDb} from "../../db/index.js";
+import {DatabaseType, IBeaconDb} from "../../db/index.js";
 import {Metrics} from "../../metrics/metrics.js";
 import {isOptimisticBlock} from "../../util/forkChoice.js";
 import {JobItemQueue} from "../../util/queue/index.js";
@@ -23,7 +23,11 @@ type ArchiveStoreModules = {
   metrics: Metrics | null;
 };
 
-type ArchiveStoreInitOpts = ArchiveStoreOpts & {dbName: string; anchorState: {finalizedCheckpoint: Checkpoint}};
+type ArchiveStoreInitOpts = ArchiveStoreOpts & {
+  dbName: string;
+  dbType: DatabaseType;
+  anchorState: {finalizedCheckpoint: Checkpoint};
+};
 
 export enum ArchiveStoreTask {
   ArchiveBlocks = "archive_blocks",
@@ -120,6 +124,7 @@ export class ArchiveStore {
       opts: {
         genesisTime: this.chain.clock.genesisTime,
         dbLocation: this.opts.dbName,
+        dbType: this.opts.dbType,
       },
       config: this.chain.config,
       metrics: this.metrics,
diff --git a/packages/beacon-node/src/chain/archiveStore/historicalState/historicalStateRegen.ts b/packages/beacon-node/src/chain/archiveStore/historicalState/historicalStateRegen.ts
index 87f2c3cf180a..26a8c3d87dc9 100644
--- a/packages/beacon-node/src/chain/archiveStore/historicalState/historicalStateRegen.ts
+++ b/packages/beacon-node/src/chain/archiveStore/historicalState/historicalStateRegen.ts
@@ -33,6 +33,7 @@ export class HistoricalStateRegen implements HistoricalStateWorkerApi {
       maxConcurrency: 1,
       maxLength: 50,
       dbLocation: modules.opts.dbLocation,
+      dbType: modules.opts.dbType,
       metricsEnabled: Boolean(modules.metrics),
       loggerOpts: modules.logger.toOpts(),
     };
diff --git a/packages/beacon-node/src/chain/archiveStore/historicalState/types.ts b/packages/beacon-node/src/chain/archiveStore/historicalState/types.ts
index 51acd62b8064..05e69c51c703 100644
--- a/packages/beacon-node/src/chain/archiveStore/historicalState/types.ts
+++ b/packages/beacon-node/src/chain/archiveStore/historicalState/types.ts
@@ -1,12 +1,14 @@
 import {ModuleThread} from "@chainsafe/threads";
 import {BeaconConfig, SpecJson} from "@lodestar/config";
 import {LoggerNode, LoggerNodeOpts} from "@lodestar/logger/node";
+import {DatabaseType} from "../../../db/index.js";
 import {Metrics} from "../../../metrics/index.js";
 
 export type HistoricalStateRegenInitModules = {
   opts: {
     genesisTime: number;
     dbLocation: string;
+    dbType: DatabaseType;
   };
   config: BeaconConfig;
   logger: LoggerNode;
@@ -24,6 +26,7 @@ export type HistoricalStateWorkerData = {
   maxConcurrency: number;
   maxLength: number;
   dbLocation: string;
+  dbType: DatabaseType;
   metricsEnabled: boolean;
   loggerOpts: LoggerNodeOpts;
 };
diff --git a/packages/beacon-node/src/chain/archiveStore/historicalState/worker.ts b/packages/beacon-node/src/chain/archiveStore/historicalState/worker.ts
index 2b8ede8b8e81..a1f9ca5b9e45 100644
--- a/packages/beacon-node/src/chain/archiveStore/historicalState/worker.ts
+++ b/packages/beacon-node/src/chain/archiveStore/historicalState/worker.ts
@@ -3,6 +3,7 @@ import {PubkeyIndexMap} from "@chainsafe/pubkey-index-map";
 import {Transfer, expose} from
"@chainsafe/threads/worker"; import {chainConfigFromJson, createBeaconConfig} from "@lodestar/config"; import {LevelDbController} from "@lodestar/db/controller/level"; +import {LmdbController} from "@lodestar/db/controller/lmdb"; import {getNodeLogger} from "@lodestar/logger/node"; import {BeaconDb} from "../../../db/index.js"; import {RegistryMetricCreator, collectNodeJSMetrics} from "../../../metrics/index.js"; @@ -26,7 +27,11 @@ logger.info("Historical state worker started"); const config = createBeaconConfig(chainConfigFromJson(workerData.chainConfigJson), workerData.genesisValidatorsRoot); -const db = new BeaconDb(config, await LevelDbController.create({name: workerData.dbLocation}, {logger})); +const controller = + workerData.dbType === "level" + ? await LevelDbController.create({name: workerData.dbLocation}, {logger}) + : await LmdbController.create({name: workerData.dbLocation}, {logger}); +const db = new BeaconDb(config, controller); const abortController = new AbortController(); diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts index 091739b7d3da..a874f21e0091 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -42,7 +42,7 @@ import { import {Logger, fromHex, gweiToWei, isErrorAborted, pruneSetToMax, sleep, toRootHex} from "@lodestar/utils"; import {ProcessShutdownCallback} from "@lodestar/validator"; import {GENESIS_EPOCH, ZERO_HASH} from "../constants/index.js"; -import {IBeaconDb} from "../db/index.js"; +import {DatabaseType, IBeaconDb} from "../db/index.js"; import {IEth1ForBlockProduction} from "../eth1/index.js"; import {BuilderStatus} from "../execution/builder/http.js"; import {IExecutionBuilder, IExecutionEngine} from "../execution/index.js"; @@ -208,6 +208,7 @@ export class BeaconChain implements IBeaconChain { config, db, dbName, + dbType, dataDir, logger, processShutdownCallback, @@ -224,6 +225,7 @@ export class BeaconChain implements IBeaconChain { config: BeaconConfig; db: IBeaconDb; dbName: string; + dbType: DatabaseType; dataDir: string; logger: Logger; processShutdownCallback: ProcessShutdownCallback; @@ -413,7 +415,7 @@ export class BeaconChain implements IBeaconChain { this.archiveStore = new ArchiveStore( {db, chain: this, logger: logger as LoggerNode, metrics}, - {...opts, dbName, anchorState: {finalizedCheckpoint: anchorState.finalizedCheckpoint}}, + {...opts, dbName, dbType, anchorState: {finalizedCheckpoint: anchorState.finalizedCheckpoint}}, signal ); diff --git a/packages/beacon-node/src/db/buckets.ts b/packages/beacon-node/src/db/buckets.ts index 1e45d188d3f2..986723eeb99b 100644 --- a/packages/beacon-node/src/db/buckets.ts +++ b/packages/beacon-node/src/db/buckets.ts @@ -78,3 +78,5 @@ export function getBucketNameByValue(enumValue: T): keyof type } throw new Error("Missing bucket for value " + enumValue); } + +export const bucketNames = Object.keys(Bucket).filter((x) => Number.isNaN(Number.parseInt(x))); diff --git a/packages/beacon-node/src/db/index.ts b/packages/beacon-node/src/db/index.ts index df83fb16e08d..d6db0eb32621 100644 --- a/packages/beacon-node/src/db/index.ts +++ b/packages/beacon-node/src/db/index.ts @@ -1,2 +1,4 @@ export {BeaconDb} from "./beacon.js"; +export {bucketNames} from "./buckets.js"; export type {IBeaconDb} from "./interface.js"; +export type {DatabaseType} from "./options.ts"; diff --git a/packages/beacon-node/src/db/options.ts b/packages/beacon-node/src/db/options.ts index 8c48d25e18af..e5fb42aec731 100644 --- 
a/packages/beacon-node/src/db/options.ts +++ b/packages/beacon-node/src/db/options.ts @@ -1,7 +1,11 @@ +export type DatabaseType = "level" | "lmdb" | "sqlite"; + export type DatabaseOptions = { name: string; + type: DatabaseType; }; export const defaultDbOptions: DatabaseOptions = { name: "./.tmp/lodestar-db", + type: "level", }; diff --git a/packages/beacon-node/src/index.ts b/packages/beacon-node/src/index.ts index 0791328b828a..2b6400d54bab 100644 --- a/packages/beacon-node/src/index.ts +++ b/packages/beacon-node/src/index.ts @@ -5,7 +5,7 @@ export {RestApiServer} from "./api/rest/base.js"; export {checkAndPersistAnchorState, initStateFromDb, initStateFromEth1} from "./chain/index.js"; export {DbCPStateDatastore} from "./chain/stateCache/datastore/db.js"; export {FileCPStateDatastore} from "./chain/stateCache/datastore/file.js"; -export {BeaconDb, type IBeaconDb} from "./db/index.js"; +export {BeaconDb, type DatabaseType, type IBeaconDb, bucketNames} from "./db/index.js"; export {Eth1Provider, type IEth1Provider} from "./eth1/index.js"; // Export metrics utilities to de-duplicate validator metrics export { diff --git a/packages/beacon-node/src/node/nodejs.ts b/packages/beacon-node/src/node/nodejs.ts index 0619534e2320..dc9288ce5fa2 100644 --- a/packages/beacon-node/src/node/nodejs.ts +++ b/packages/beacon-node/src/node/nodejs.ts @@ -214,6 +214,7 @@ export class BeaconNode { dataDir, db, dbName: opts.db.name, + dbType: opts.db.type, logger: logger.child({module: LoggerModule.chain}), processShutdownCallback, metrics, diff --git a/packages/beacon-node/test/perf/chain/produceBlock/produceBlockBody.test.ts b/packages/beacon-node/test/perf/chain/produceBlock/produceBlockBody.test.ts index 14dd293e150e..61867ee56a85 100644 --- a/packages/beacon-node/test/perf/chain/produceBlock/produceBlockBody.test.ts +++ b/packages/beacon-node/test/perf/chain/produceBlock/produceBlockBody.test.ts @@ -45,6 +45,7 @@ describe("produceBlockBody", () => { db, dataDir: ".", dbName: ".", + dbType: "level", logger, processShutdownCallback: () => {}, metrics: null, diff --git a/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts b/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts index 7d9b21ff94cf..b52c3efec234 100644 --- a/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts +++ b/packages/beacon-node/test/perf/chain/verifyImportBlocks.test.ts @@ -96,6 +96,7 @@ describe.skip("verify+import blocks - range sync perf test", () => { db, dataDir: ".", dbName: ".", + dbType: "level", logger, processShutdownCallback: () => {}, metrics: null, diff --git a/packages/beacon-node/test/spec/presets/fork_choice.test.ts b/packages/beacon-node/test/spec/presets/fork_choice.test.ts index d405a89968c8..565a9804cada 100644 --- a/packages/beacon-node/test/spec/presets/fork_choice.test.ts +++ b/packages/beacon-node/test/spec/presets/fork_choice.test.ts @@ -120,6 +120,7 @@ const forkChoiceTest = db: getMockedBeaconDb(), dataDir: ".", dbName: ",", + dbType: "level", logger, processShutdownCallback: () => {}, clock, diff --git a/packages/beacon-node/test/utils/networkWithMockDb.ts b/packages/beacon-node/test/utils/networkWithMockDb.ts index 5572a7cba992..ee457ef46bdd 100644 --- a/packages/beacon-node/test/utils/networkWithMockDb.ts +++ b/packages/beacon-node/test/utils/networkWithMockDb.ts @@ -65,6 +65,7 @@ export async function getNetworkForTest( db, dataDir: ".", dbName: ".", + dbType: "level", logger, processShutdownCallback: () => {}, // set genesis time so that we are at ALTAIR_FORK_EPOCH diff 
--git a/packages/cli/src/cmds/beacon/handler.ts b/packages/cli/src/cmds/beacon/handler.ts index 9bac06498daf..c4b21b7f3fb4 100644 --- a/packages/cli/src/cmds/beacon/handler.ts +++ b/packages/cli/src/cmds/beacon/handler.ts @@ -2,9 +2,11 @@ import path from "node:path"; import {getHeapStatistics} from "node:v8"; import {SignableENR} from "@chainsafe/enr"; import {hasher} from "@chainsafe/persistent-merkle-tree"; -import {BeaconDb, BeaconNode} from "@lodestar/beacon-node"; +import {BeaconDb, BeaconNode, bucketNames} from "@lodestar/beacon-node"; import {ChainForkConfig, createBeaconConfig} from "@lodestar/config"; import {LevelDbController} from "@lodestar/db/controller/level"; +import {LmdbController} from "@lodestar/db/controller/lmdb"; +import {SqliteController} from "@lodestar/db/controller/sqlite"; import {LoggerNode, getNodeLogger} from "@lodestar/logger/node"; import {ACTIVE_PRESET, PresetName} from "@lodestar/params"; import {ErrorAborted, bytesToInt, formatBytes} from "@lodestar/utils"; @@ -65,8 +67,22 @@ export async function beaconHandler(args: BeaconArgs & GlobalArgs): Promise = { type: "string", }, + dbType: { + description: "Database backend type", + choices: ["level", "lmdb", "sqlite"], + default: "level", + hidden: true, + type: "string", + }, + persistInvalidSszObjectsDir: { description: "Enable and specify a directory to persist invalid ssz objects", defaultDescription: defaultBeaconPaths.persistInvalidSszObjectsDir, diff --git a/packages/db/package.json b/packages/db/package.json index d5a195fdda56..206e004ebe0e 100644 --- a/packages/db/package.json +++ b/packages/db/package.json @@ -21,12 +21,24 @@ "./controller/level": { "bun": "./src/controller/level.ts", "import": "./lib/controller/level.js" + }, + "./controller/lmdb": { + "bun": "./src/controller/lmdb.ts", + "import": "./lib/controller/lmdb.js" + }, + "./controller/sqlite": { + "bun": "./src/controller/sqlite_bun.ts", + "import": "./lib/controller/sqlite.js" } }, "imports": { "#controller/level": { "bun": "./src/controller/level.ts", "import": "./src/controller/level.js" + }, + "#controller/sqlite": { + "bun": "./src/controller/sqlite_bun.ts", + "import": "./src/controller/sqlite.js" } }, "files": [ @@ -54,7 +66,8 @@ "@lodestar/config": "^1.35.0", "@lodestar/utils": "^1.35.0", "classic-level": "^1.4.1", - "it-all": "^3.0.4" + "it-all": "^3.0.4", + "lmdb": "^3.4.3" }, "devDependencies": { "@lodestar/logger": "^1.35.0" diff --git a/packages/db/src/abstractPrefixedRepository.ts b/packages/db/src/abstractPrefixedRepository.ts index d27e630d165a..89cec239f3df 100644 --- a/packages/db/src/abstractPrefixedRepository.ts +++ b/packages/db/src/abstractPrefixedRepository.ts @@ -2,7 +2,7 @@ import {Type} from "@chainsafe/ssz"; import {ChainForkConfig} from "@lodestar/config"; import {BUCKET_LENGTH} from "./const.js"; import {KeyValue} from "./controller/index.js"; -import {Db, DbReqOpts, FilterOptions} from "./controller/interface.js"; +import {ControllerFilterOptions, Db, DbReqOpts, FilterOptions} from "./controller/interface.js"; import {encodeKey} from "./util.js"; type Id = Uint8Array | string | number | bigint; @@ -238,7 +238,7 @@ export abstract class PrefixedRepository { if (opts?.reverse !== undefined) optsBuff.reverse = opts.reverse; if (opts?.limit !== undefined) optsBuff.limit = opts.limit; - const data = await this.db.keys(optsBuff); + const data = await this.db.keys(optsBuff as ControllerFilterOptions); return (data ?? 
[]).map((data) => this.decodeKeyRaw(this.unwrapKey(data)));
   }
 }
diff --git a/packages/db/src/abstractRepository.ts b/packages/db/src/abstractRepository.ts
index c42c9459b2ca..23a04cb2dfc7 100644
--- a/packages/db/src/abstractRepository.ts
+++ b/packages/db/src/abstractRepository.ts
@@ -2,7 +2,7 @@ import {Type} from "@chainsafe/ssz";
 import {ChainForkConfig} from "@lodestar/config";
 import {BUCKET_LENGTH} from "./const.js";
 import {FilterOptions, KeyValue} from "./controller/index.js";
-import {Db, DbReqOpts} from "./controller/interface.js";
+import {ControllerFilterOptions, Db, DbReqOpts} from "./controller/interface.js";
 import {encodeKey as _encodeKey} from "./util.js";
 
 export type Id = Uint8Array | string | number | bigint;
@@ -252,8 +252,8 @@ export abstract class Repository<I extends Id, T> {
   /**
    * Transforms opts from I to Uint8Array
    */
-  protected dbFilterOptions(opts?: FilterOptions<I>): FilterOptions<Uint8Array> {
-    const optsBuff: FilterOptions<Uint8Array> = {
+  protected dbFilterOptions(opts?: FilterOptions<I>): ControllerFilterOptions<Uint8Array> {
+    const optsBuff: ControllerFilterOptions<Uint8Array> = {
       bucketId: this.bucketId,
     };
diff --git a/packages/db/src/controller/interface.ts b/packages/db/src/controller/interface.ts
index 7a3a3d7f96bc..cbf73615d6e1 100644
--- a/packages/db/src/controller/interface.ts
+++ b/packages/db/src/controller/interface.ts
@@ -18,9 +18,20 @@ export interface FilterOptions<K> {
   bucketId?: string;
 }
 
+export interface ControllerFilterOptions<K> {
+  gt?: K;
+  gte?: K;
+  lt?: K;
+  lte?: K;
+  reverse?: boolean;
+  limit?: number;
+  /** For metrics */
+  bucketId: string;
+}
+
 export type DbReqOpts = {
   /** For metrics */
-  bucketId?: string;
+  bucketId: string;
 };
 
 export interface KeyValue<K, V> {
@@ -38,25 +49,24 @@ export interface DatabaseController<K, V> {
 
   // Core API
 
-  get(key: K, opts?: DbReqOpts): Promise<V | null>;
-  getMany(key: K[], opts?: DbReqOpts): Promise<(V | undefined)[]>;
-
-  put(key: K, value: V, opts?: DbReqOpts): Promise<void>;
-  delete(key: K, opts?: DbReqOpts): Promise<void>;
+  get(key: K, opts: DbReqOpts): Promise<V | null>;
+  getMany(key: K[], opts: DbReqOpts): Promise<(V | undefined)[]>;
+  put(key: K, value: V, opts: DbReqOpts): Promise<void>;
+  delete(key: K, opts: DbReqOpts): Promise<void>;
 
   // Batch operations
 
-  batchPut(items: KeyValue<K, V>[], opts?: DbReqOpts): Promise<void>;
-  batchDelete(keys: K[], opts?: DbReqOpts): Promise<void>;
+  batchPut(items: KeyValue<K, V>[], opts: DbReqOpts): Promise<void>;
+  batchDelete(keys: K[], opts: DbReqOpts): Promise<void>;
 
   // Iterate over entries
 
-  keysStream(opts?: FilterOptions<K>): AsyncIterable<K>;
-  keys(opts?: FilterOptions<K>): Promise<K[]>;
+  keysStream(opts: ControllerFilterOptions<K>): AsyncIterable<K>;
+  keys(opts: ControllerFilterOptions<K>): Promise<K[]>;
 
-  valuesStream(opts?: FilterOptions<K>): AsyncIterable<V>;
-  values(opts?: FilterOptions<K>): Promise<V[]>;
+  valuesStream(opts: ControllerFilterOptions<K>): AsyncIterable<V>;
+  values(opts: ControllerFilterOptions<K>): Promise<V[]>;
 
-  entriesStream(opts?: FilterOptions<K>): AsyncIterable<KeyValue<K, V>>;
-  entries(opts?: FilterOptions<K>): Promise<KeyValue<K, V>[]>;
+  entriesStream(opts: ControllerFilterOptions<K>): AsyncIterable<KeyValue<K, V>>;
+  entries(opts: ControllerFilterOptions<K>): Promise<KeyValue<K, V>[]>;
 }
diff --git a/packages/db/src/controller/level.ts b/packages/db/src/controller/level.ts
index a945634ba58d..f7931796964f 100644
--- a/packages/db/src/controller/level.ts
+++ b/packages/db/src/controller/level.ts
@@ -90,7 +90,7 @@ export class LevelDbController implements DatabaseController<Uint8Array, Uint8Array> {
-  async get(key: Uint8Array, opts?: DbReqOpts): Promise<Uint8Array | null> {
+  async get(key: Uint8Array, opts: DbReqOpts): Promise<Uint8Array | null> {
     try {
       this.metrics?.dbReadReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1);
       this.metrics?.dbReadItems.inc({bucket: opts?.bucketId ??
BUCKET_ID_UNKNOWN}, 1); @@ -109,13 +109,13 @@ export class LevelDbController implements DatabaseController { - this.metrics?.dbReadReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); - this.metrics?.dbReadItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, keys.length); + async getMany(keys: Uint8Array[], opts: DbReqOpts): Promise<(Uint8Array | undefined)[]> { + this.metrics?.dbReadReq.inc({bucket: opts.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + this.metrics?.dbReadItems.inc({bucket: opts.bucketId ?? BUCKET_ID_UNKNOWN}, keys.length); return await this.db.getMany(keys); } - put(key: Uint8Array, value: Uint8Array, opts?: DbReqOpts): Promise { + put(key: Uint8Array, value: Uint8Array, opts: DbReqOpts): Promise { this.metrics?.dbWriteReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); this.metrics?.dbWriteItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); @@ -143,15 +143,15 @@ export class LevelDbController implements DatabaseController ({type: "del", key: key}))); } - keysStream(opts: FilterOptions = {}): AsyncIterable { + keysStream(opts: FilterOptions): AsyncIterable { return this.metricsIterator(this.db.keys(opts), (key) => key, opts.bucketId ?? BUCKET_ID_UNKNOWN); } - valuesStream(opts: FilterOptions = {}): AsyncIterable { + valuesStream(opts: FilterOptions): AsyncIterable { return this.metricsIterator(this.db.values(opts), (value) => value, opts.bucketId ?? BUCKET_ID_UNKNOWN); } - entriesStream(opts: FilterOptions = {}): AsyncIterable> { + entriesStream(opts: FilterOptions): AsyncIterable> { return this.metricsIterator( this.db.iterator(opts), (entry) => ({key: entry[0], value: entry[1]}), @@ -159,15 +159,15 @@ export class LevelDbController implements DatabaseController = {}): Promise { + keys(opts: FilterOptions): Promise { return this.metricsAll(this.db.keys(opts).all(), opts.bucketId ?? BUCKET_ID_UNKNOWN); } - values(opts: FilterOptions = {}): Promise { + values(opts: FilterOptions): Promise { return this.metricsAll(this.db.values(opts).all(), opts.bucketId ?? BUCKET_ID_UNKNOWN); } - async entries(opts: FilterOptions = {}): Promise[]> { + async entries(opts: FilterOptions): Promise[]> { const entries = await this.metricsAll(this.db.iterator(opts).all(), opts.bucketId ?? BUCKET_ID_UNKNOWN); return entries.map((entry) => ({key: entry[0], value: entry[1]})); } diff --git a/packages/db/src/controller/lmdb.ts b/packages/db/src/controller/lmdb.ts new file mode 100644 index 000000000000..15d35c1e8954 --- /dev/null +++ b/packages/db/src/controller/lmdb.ts @@ -0,0 +1,265 @@ +import {type Database, open} from "lmdb"; +import {Logger} from "@lodestar/utils"; +import {DatabaseController, DatabaseOptions, DbReqOpts, FilterOptions, KeyValue} from "./interface.ts"; +import {LevelDbControllerMetrics} from "./metrics.ts"; + +export type LmdbControllerModules = { + logger: Logger; + metrics?: LevelDbControllerMetrics | null; +}; + +const BUCKET_ID_UNKNOWN = "unknown"; + +export class LmdbController implements DatabaseController { + db: Database; + metrics: LevelDbControllerMetrics | null; + + constructor(path: string, metrics: LevelDbControllerMetrics | null) { + this.db = open({ + path, + encoding: "binary", + }); + this.metrics = metrics; + } + + static async create(options: DatabaseOptions, {metrics}: LmdbControllerModules): Promise { + return new LmdbController(options.name, metrics ?? 
null); + } + static async destroy(_location: string): Promise {} + + close(): Promise { + return this.db.close(); + } + + setMetrics(metrics: LevelDbControllerMetrics): void { + if (this.metrics !== null) { + throw Error("metrics can only be set once"); + } + this.metrics = metrics; + } + + get(key: Uint8Array, opts?: DbReqOpts): Promise { + this.metrics?.dbReadReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + this.metrics?.dbReadItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + return Promise.resolve(this.db.get(key) ?? null); + } + + getMany(keys: Uint8Array[], opts: DbReqOpts): Promise<(Uint8Array | undefined)[]> { + this.metrics?.dbReadReq.inc({bucket: opts.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + this.metrics?.dbReadItems.inc({bucket: opts.bucketId ?? BUCKET_ID_UNKNOWN}, keys.length); + return this.db.getMany(keys); + } + + async put(key: Uint8Array, value: Uint8Array, opts?: DbReqOpts): Promise { + this.metrics?.dbWriteReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + this.metrics?.dbWriteItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + await this.db.put(key, value); + } + + async delete(key: Uint8Array, opts?: DbReqOpts): Promise { + this.metrics?.dbWriteReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + this.metrics?.dbWriteItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + await this.db.remove(key); + } + + async batchPut(items: KeyValue[], opts?: DbReqOpts): Promise { + this.metrics?.dbWriteReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + this.metrics?.dbWriteItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, items.length); + await this.db.batch(() => { + for (const {key, value} of items) { + this.db.put(key, value); + } + }); + } + + async batchDelete(keys: Uint8Array[], opts?: DbReqOpts): Promise { + this.metrics?.dbWriteReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + this.metrics?.dbWriteItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, keys.length); + await this.db.batch(() => { + for (const key of keys) { + this.db.remove(key); + } + }); + } + + async *keysStream(opts?: FilterOptions | undefined): AsyncIterable { + const keys = this.db + .getKeys({ + start: opts?.gt ?? opts?.gte, + end: opts?.lt ?? opts?.lte, + reverse: opts?.reverse, + limit: opts?.limit, + }) + .filter((key) => { + if (opts?.gt !== undefined && Buffer.compare(key, opts.gt) <= 0) { + return false; + } + if (opts?.gte !== undefined && Buffer.compare(key, opts.gte) < 0) { + return false; + } + if (opts?.lt !== undefined && Buffer.compare(key, opts.lt) >= 0) { + return false; + } + if (opts?.lte !== undefined && Buffer.compare(key, opts.lte) > 0) { + return false; + } + return true; + }); + this.metrics?.dbReadReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + let itemsRead = 0; + for (const key of keys) { + itemsRead++; + yield key; + } + this.metrics?.dbReadItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, itemsRead); + } + + keys(opts?: FilterOptions | undefined): Promise { + const keys = this.db + .getKeys({ + start: opts?.gt ?? opts?.gte, + end: opts?.lt ?? 
opts?.lte, + reverse: opts?.reverse, + limit: opts?.limit, + }) + .filter((key) => { + if (opts?.gt !== undefined && Buffer.compare(key, opts.gt) <= 0) { + return false; + } + if (opts?.gte !== undefined && Buffer.compare(key, opts.gte) < 0) { + return false; + } + if (opts?.lt !== undefined && Buffer.compare(key, opts.lt) >= 0) { + return false; + } + if (opts?.lte !== undefined && Buffer.compare(key, opts.lte) > 0) { + return false; + } + return true; + }).asArray; + this.metrics?.dbReadReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + this.metrics?.dbReadItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, keys.length); + return Promise.resolve(keys); + } + + async *valuesStream(opts?: FilterOptions | undefined): AsyncIterable { + const values = this.db + .getRange({ + start: opts?.gt ?? opts?.gte, + end: opts?.lt ?? opts?.lte, + reverse: opts?.reverse, + limit: opts?.limit, + }) + .filter(({key}) => { + if (opts?.gt !== undefined && Buffer.compare(key, opts.gt) <= 0) { + return false; + } + if (opts?.gte !== undefined && Buffer.compare(key, opts.gte) < 0) { + return false; + } + if (opts?.lt !== undefined && Buffer.compare(key, opts.lt) >= 0) { + return false; + } + if (opts?.lte !== undefined && Buffer.compare(key, opts.lte) > 0) { + return false; + } + return true; + }); + this.metrics?.dbReadReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + let itemsRead = 0; + for (const {value} of values) { + itemsRead++; + yield value; + } + this.metrics?.dbReadItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, itemsRead); + } + + values(opts?: FilterOptions | undefined): Promise { + const values = this.db + .getRange({ + start: opts?.gt ?? opts?.gte, + end: opts?.lt ?? opts?.lte, + reverse: opts?.reverse, + limit: opts?.limit, + }) + .filter(({key}) => { + if (opts?.gt !== undefined && Buffer.compare(key, opts.gt) <= 0) { + return false; + } + if (opts?.gte !== undefined && Buffer.compare(key, opts.gte) < 0) { + return false; + } + if (opts?.lt !== undefined && Buffer.compare(key, opts.lt) >= 0) { + return false; + } + if (opts?.lte !== undefined && Buffer.compare(key, opts.lte) > 0) { + return false; + } + return true; + }) + .map(({value}) => value).asArray; + this.metrics?.dbReadReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + this.metrics?.dbReadItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, values.length); + return Promise.resolve(values); + } + + async *entriesStream(opts?: FilterOptions | undefined): AsyncIterable> { + const entries = this.db + .getRange({ + start: opts?.gt ?? opts?.gte, + end: opts?.lt ?? opts?.lte, + reverse: opts?.reverse, + limit: opts?.limit, + }) + .filter(({key}) => { + if (opts?.gt !== undefined && Buffer.compare(key, opts.gt) <= 0) { + return false; + } + if (opts?.gte !== undefined && Buffer.compare(key, opts.gte) < 0) { + return false; + } + if (opts?.lt !== undefined && Buffer.compare(key, opts.lt) >= 0) { + return false; + } + if (opts?.lte !== undefined && Buffer.compare(key, opts.lte) > 0) { + return false; + } + return true; + }); + this.metrics?.dbReadReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + let itemsRead = 0; + for (const entry of entries) { + itemsRead++; + yield entry; + } + this.metrics?.dbReadItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, itemsRead); + } + + entries(opts?: FilterOptions | undefined): Promise[]> { + const entries = this.db + .getRange({ + start: opts?.gt ?? opts?.gte, + end: opts?.lt ?? 
opts?.lte, + reverse: opts?.reverse, + limit: opts?.limit, + }) + .filter(({key}) => { + if (opts?.gt !== undefined && Buffer.compare(key, opts.gt) <= 0) { + return false; + } + if (opts?.gte !== undefined && Buffer.compare(key, opts.gte) < 0) { + return false; + } + if (opts?.lt !== undefined && Buffer.compare(key, opts.lt) >= 0) { + return false; + } + if (opts?.lte !== undefined && Buffer.compare(key, opts.lte) > 0) { + return false; + } + return true; + }).asArray; + this.metrics?.dbReadReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + this.metrics?.dbReadItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, entries.length); + return Promise.resolve(entries); + } +} diff --git a/packages/db/src/controller/lmdb_bun.ts b/packages/db/src/controller/lmdb_bun.ts new file mode 100644 index 000000000000..5699cb4f225d --- /dev/null +++ b/packages/db/src/controller/lmdb_bun.ts @@ -0,0 +1,444 @@ +import {lmdb} from "@lodestar/bun"; +import {Logger} from "@lodestar/utils"; +import {DatabaseController, DatabaseOptions, DbReqOpts, FilterOptions, KeyValue} from "./interface.ts"; +import {LevelDbControllerMetrics} from "./metrics.ts"; + +const { + cursorDeinit, + cursorGetCurrentValue, + cursorGoToFirst, + cursorGoToNext, + cursorSeek, + databaseCursor, + databaseDelete, + databaseGet, + databaseOpen, + databaseSet, + environmentDeinit, + environmentInit, + transactionAbort, + transactionBegin, + transactionCommit, +} = lmdb; + +export type LmdbControllerModules = { + logger: Logger; + metrics?: LevelDbControllerMetrics | null; +}; + +export enum Status { + started = "started", + closed = "closed", +} + +const BUCKET_ID_UNKNOWN = "unknown"; + +export class LmdbController implements DatabaseController { + private status = Status.started; + + constructor( + private readonly db: lmdb.Environment, + private metrics: LevelDbControllerMetrics | null + ) {} + + static async create(options: DatabaseOptions, {metrics}: LmdbControllerModules): Promise { + const db = environmentInit(options.name, {mapSize: 500_000_000_000}); + return new LmdbController(db, metrics ?? null); + } + + static async destroy(_location: string): Promise { + // not implemented + } + + async close(): Promise { + if (this.status === Status.closed) { + return; + } + this.status = Status.closed; + + environmentDeinit(this.db); + } + + setMetrics(metrics: LevelDbControllerMetrics): void { + if (this.metrics !== null) { + throw new Error("Metrics already set"); + } + this.metrics = metrics; + } + + async get(key: Uint8Array, opts?: DbReqOpts): Promise { + this.metrics?.dbReadReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + this.metrics?.dbReadItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + const tx = transactionBegin(this.db); + const db = databaseOpen(tx, null); + + const raw = databaseGet(tx, db, key); + const value = raw ? raw.slice() : null; + + transactionAbort(tx); + + return value; + } + + async getMany(keys: Uint8Array[], opts?: DbReqOpts): Promise<(Uint8Array | undefined)[]> { + this.metrics?.dbReadReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + this.metrics?.dbReadItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, keys.length); + + const tx = transactionBegin(this.db); + const db = databaseOpen(tx, null); + + const values = []; + for (const key of keys) { + const raw = databaseGet(tx, db, key); + values.push(raw ? 
raw.slice() : undefined); + } + transactionAbort(tx); + + return values; + } + + async put(key: Uint8Array, value: Uint8Array, opts?: DbReqOpts): Promise { + this.metrics?.dbWriteReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + this.metrics?.dbWriteItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + + const tx = transactionBegin(this.db, false); + const db = databaseOpen(tx, null); + + databaseSet(tx, db, key, value); + + transactionCommit(tx); + } + + async delete(key: Uint8Array, opts?: DbReqOpts): Promise { + this.metrics?.dbWriteReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + this.metrics?.dbWriteItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + + const tx = transactionBegin(this.db, false); + const db = databaseOpen(tx, null); + + databaseDelete(tx, db, key); + + transactionCommit(tx); + } + + async batchPut(items: KeyValue[], opts?: DbReqOpts): Promise { + this.metrics?.dbWriteReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + this.metrics?.dbWriteItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, items.length); + + const tx = transactionBegin(this.db, false); + const db = databaseOpen(tx, null); + + for (const {key, value} of items) { + databaseSet(tx, db, key, value); + } + + transactionCommit(tx); + } + + async batchDelete(keys: Uint8Array[], opts?: DbReqOpts): Promise { + this.metrics?.dbWriteReq.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, 1); + this.metrics?.dbWriteItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, keys.length); + + const tx = transactionBegin(this.db, false); + const db = databaseOpen(tx, null); + + for (const key of keys) { + databaseDelete(tx, db, key); + } + + transactionCommit(tx); + } + + keysStream(opts: FilterOptions = {}): AsyncIterable { + const tx = transactionBegin(this.db); + const db = databaseOpen(tx, null); + + const iterator = databaseCursor(tx, db); + + const metrics = this.metrics; + const bucket = opts.bucketId ?? BUCKET_ID_UNKNOWN; + metrics?.dbReadReq.inc({bucket}, 1); + let itemsRead = 0; + + return (async function* () { + try { + if (opts.gt) { + cursorSeek(iterator, opts.gt); + const key = cursorGoToNext(iterator); + if (!key) return; + if (opts.lt && Buffer.compare(key, opts.lt) >= 0) return; + if (opts.lte && Buffer.compare(key, opts.lte) > 0) return; + itemsRead++; + yield key.slice(); + } else if (opts.gte) { + const key = cursorSeek(iterator, opts.gte); + if (!key) return; + if (opts.lt && Buffer.compare(key, opts.lt) >= 0) return; + if (opts.lte && Buffer.compare(key, opts.lte) > 0) return; + itemsRead++; + yield key.slice(); + } else { + const key = cursorGoToFirst(iterator); + if (!key) return; + if (opts.lt && Buffer.compare(key, opts.lt) >= 0) return; + if (opts.lte && Buffer.compare(key, opts.lte) > 0) return; + itemsRead++; + yield key.slice(); + } + + while (true) { + const key = cursorGoToNext(iterator); + if (!key) return; + if (opts.lt && Buffer.compare(key, opts.lt) >= 0) break; + if (opts.lte && Buffer.compare(key, opts.lte) > 0) break; + itemsRead++; + yield key.slice(); + } + } finally { + metrics?.dbReadItems.inc({bucket}, itemsRead); + cursorDeinit(iterator); + transactionAbort(tx); + } + })(); + } + + async keys(opts: FilterOptions = {}): Promise { + const tx = transactionBegin(this.db); + const db = databaseOpen(tx, null); + const iterator = databaseCursor(tx, db); + this.metrics?.dbReadReq.inc({bucket: opts?.bucketId ?? 
BUCKET_ID_UNKNOWN}, 1); + const keys: Uint8Array[] = []; + try { + if (opts.gt) { + cursorSeek(iterator, opts.gt); + const key = cursorGoToNext(iterator); + if (!key) return keys; + if (opts.lt && Buffer.compare(key, opts.lt) >= 0) return keys; + if (opts.lte && Buffer.compare(key, opts.lte) > 0) return keys; + keys.push(key.slice()); + } else if (opts.gte) { + const key = cursorSeek(iterator, opts.gte); + if (!key) return keys; + if (opts.lt && Buffer.compare(key, opts.lt) >= 0) return keys; + if (opts.lte && Buffer.compare(key, opts.lte) > 0) return keys; + keys.push(key.slice()); + } else { + const key = cursorGoToFirst(iterator); + if (!key) return keys; + if (opts.lt && Buffer.compare(key, opts.lt) >= 0) return keys; + if (opts.lte && Buffer.compare(key, opts.lte) > 0) return keys; + keys.push(key.slice()); + } + while (true) { + const key = cursorGoToNext(iterator); + if (!key) break; + if (opts.lt && Buffer.compare(key, opts.lt) >= 0) break; + if (opts.lte && Buffer.compare(key, opts.lte) > 0) break; + keys.push(key.slice()); + } + return keys; + } finally { + this.metrics?.dbReadItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, keys.length); + cursorDeinit(iterator); + transactionAbort(tx); + } + } + + valuesStream(opts: FilterOptions = {}): AsyncIterable { + const tx = transactionBegin(this.db); + const db = databaseOpen(tx, null); + const iterator = databaseCursor(tx, db); + const bucket = opts.bucketId ?? BUCKET_ID_UNKNOWN; + const metrics = this.metrics; + metrics?.dbReadReq.inc({bucket}, 1); + let itemsRead = 0; + return (async function* () { + try { + if (opts.gt) { + cursorSeek(iterator, opts.gt); + const key = cursorGoToNext(iterator); + if (!key) return; + if (opts.lt && Buffer.compare(key, opts.lt) >= 0) return; + if (opts.lte && Buffer.compare(key, opts.lte) > 0) return; + const value = cursorGetCurrentValue(iterator); + itemsRead++; + yield value.slice(); + } else if (opts.gte) { + const key = cursorSeek(iterator, opts.gte); + if (!key) return; + if (opts.lt && Buffer.compare(key, opts.lt) >= 0) return; + if (opts.lte && Buffer.compare(key, opts.lte) > 0) return; + const value = cursorGetCurrentValue(iterator); + itemsRead++; + yield value.slice(); + } else { + const key = cursorGoToFirst(iterator); + if (!key) return; + if (opts.lt && Buffer.compare(key, opts.lt) >= 0) return; + if (opts.lte && Buffer.compare(key, opts.lte) > 0) return; + const value = cursorGetCurrentValue(iterator); + itemsRead++; + yield value.slice(); + } + while (true) { + const key = cursorGoToNext(iterator); + if (!key) return; + if (opts.lt && Buffer.compare(key, opts.lt) >= 0) break; + if (opts.lte && Buffer.compare(key, opts.lte) > 0) break; + const value = cursorGetCurrentValue(iterator); + itemsRead++; + yield value.slice(); + } + } finally { + metrics?.dbReadItems.inc({bucket}, itemsRead); + cursorDeinit(iterator); + transactionAbort(tx); + } + })(); + } + + async values(opts: FilterOptions = {}): Promise { + const tx = transactionBegin(this.db); + const db = databaseOpen(tx, null); + const iterator = databaseCursor(tx, db); + const values: Uint8Array[] = []; + this.metrics?.dbReadReq.inc({bucket: opts?.bucketId ?? 
BUCKET_ID_UNKNOWN}, 1); + try { + if (opts.gt) { + cursorSeek(iterator, opts.gt); + const key = cursorGoToNext(iterator); + if (!key) return values; + if (opts.lt && Buffer.compare(key, opts.lt) >= 0) return values; + if (opts.lte && Buffer.compare(key, opts.lte) > 0) return values; + const value = cursorGetCurrentValue(iterator); + values.push(value.slice()); + } else if (opts.gte) { + const key = cursorSeek(iterator, opts.gte); + if (!key) return values; + if (opts.lt && Buffer.compare(key, opts.lt) >= 0) return values; + if (opts.lte && Buffer.compare(key, opts.lte) > 0) return values; + const value = cursorGetCurrentValue(iterator); + values.push(value.slice()); + } else { + const key = cursorGoToFirst(iterator); + if (!key) return values; + if (opts.lt && Buffer.compare(key, opts.lt) >= 0) return values; + if (opts.lte && Buffer.compare(key, opts.lte) > 0) return values; + const value = cursorGetCurrentValue(iterator); + values.push(value.slice()); + } + while (true) { + const key = cursorGoToNext(iterator); + if (!key) break; + if (opts.lt && Buffer.compare(key, opts.lt) >= 0) break; + if (opts.lte && Buffer.compare(key, opts.lte) > 0) break; + const value = cursorGetCurrentValue(iterator); + values.push(value.slice()); + } + return values; + } finally { + this.metrics?.dbReadItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, values.length); + cursorDeinit(iterator); + transactionAbort(tx); + } + } + + entriesStream(opts: FilterOptions = {}): AsyncIterable> { + const tx = transactionBegin(this.db); + const db = databaseOpen(tx, null); + const iterator = databaseCursor(tx, db); + const bucket = opts.bucketId ?? BUCKET_ID_UNKNOWN; + const metrics = this.metrics; + metrics?.dbReadReq.inc({bucket}, 1); + let itemsRead = 0; + return (async function* () { + try { + if (opts.gt) { + cursorSeek(iterator, opts.gt); + const key = cursorGoToNext(iterator)?.slice(); + if (!key) return; + if (opts.lt && Buffer.compare(key, opts.lt) >= 0) return; + if (opts.lte && Buffer.compare(key, opts.lte) > 0) return; + const value = cursorGetCurrentValue(iterator).slice(); + itemsRead++; + yield {key, value}; + } else if (opts.gte) { + const key = cursorSeek(iterator, opts.gte)?.slice(); + if (!key) return; + if (opts.lt && Buffer.compare(key, opts.lt) >= 0) return; + if (opts.lte && Buffer.compare(key, opts.lte) > 0) return; + const value = cursorGetCurrentValue(iterator).slice(); + itemsRead++; + yield {key, value}; + } else { + const key = cursorGoToFirst(iterator)?.slice(); + if (!key) return; + if (opts.lt && Buffer.compare(key, opts.lt) >= 0) return; + if (opts.lte && Buffer.compare(key, opts.lte) > 0) return; + const value = cursorGetCurrentValue(iterator).slice(); + itemsRead++; + yield {key, value}; + } + while (true) { + const key = cursorGoToNext(iterator)?.slice(); + if (!key) return; + if (opts.lt && Buffer.compare(key, opts.lt) >= 0) break; + if (opts.lte && Buffer.compare(key, opts.lte) > 0) break; + const value = cursorGetCurrentValue(iterator).slice(); + itemsRead++; + yield {key, value}; + } + } finally { + metrics?.dbReadItems.inc({bucket}, itemsRead); + cursorDeinit(iterator); + transactionAbort(tx); + } + })(); + } + + async entries(opts: FilterOptions = {}): Promise[]> { + const tx = transactionBegin(this.db); + const db = databaseOpen(tx, null); + const iterator = databaseCursor(tx, db); + const entries: KeyValue[] = []; + this.metrics?.dbReadReq.inc({bucket: opts?.bucketId ?? 
BUCKET_ID_UNKNOWN}, 1);
+    try {
+      if (opts.gt) {
+        cursorSeek(iterator, opts.gt);
+        const key = cursorGoToNext(iterator)?.slice();
+        if (!key) return entries;
+        if (opts.lt && Buffer.compare(key, opts.lt) >= 0) return entries;
+        if (opts.lte && Buffer.compare(key, opts.lte) > 0) return entries;
+        const value = cursorGetCurrentValue(iterator).slice();
+        entries.push({key, value});
+      } else if (opts.gte) {
+        const key = cursorSeek(iterator, opts.gte)?.slice();
+        if (!key) return entries;
+        if (opts.lt && Buffer.compare(key, opts.lt) >= 0) return entries;
+        if (opts.lte && Buffer.compare(key, opts.lte) > 0) return entries;
+        const value = cursorGetCurrentValue(iterator).slice();
+        entries.push({key, value});
+      } else {
+        const key = cursorGoToFirst(iterator)?.slice();
+        if (!key) return entries;
+        if (opts.lt && Buffer.compare(key, opts.lt) >= 0) return entries;
+        if (opts.lte && Buffer.compare(key, opts.lte) > 0) return entries;
+        const value = cursorGetCurrentValue(iterator).slice();
+        entries.push({key, value});
+      }
+      while (true) {
+        const key = cursorGoToNext(iterator)?.slice();
+        if (!key) break;
+        if (opts.lt && Buffer.compare(key, opts.lt) >= 0) break;
+        if (opts.lte && Buffer.compare(key, opts.lte) > 0) break;
+        const value = cursorGetCurrentValue(iterator).slice();
+        entries.push({key, value});
+      }
+      return entries;
+    } finally {
+      this.metrics?.dbReadItems.inc({bucket: opts?.bucketId ?? BUCKET_ID_UNKNOWN}, entries.length);
+      cursorDeinit(iterator);
+      transactionAbort(tx);
+    }
+  }
+}
diff --git a/packages/db/src/controller/sqlite.ts b/packages/db/src/controller/sqlite.ts
new file mode 100644
index 000000000000..01bcd97d81b6
--- /dev/null
+++ b/packages/db/src/controller/sqlite.ts
@@ -0,0 +1,280 @@
+import {DatabaseSync} from "node:sqlite";
+import {Logger} from "@lodestar/utils";
+import {
+  ControllerFilterOptions,
+  DatabaseController,
+  DatabaseOptions,
+  DbReqOpts,
+  FilterOptions,
+  KeyValue,
+} from "./interface.ts";
+import {LevelDbControllerMetrics} from "./metrics.ts";
+
+enum Status {
+  started = "started",
+  closed = "closed",
+}
+
+export type SqliteControllerModules = {
+  logger: Logger;
+  metrics?: LevelDbControllerMetrics | null;
+};
+
+/**
+ * The Node.js `node:sqlite` implementation of DB
+ *
+ * - `opts.bucketId` is NOT just used for metrics, it is required
+ * - Each bucket is a separate table
+ *   (key BLOB PRIMARY KEY, value BLOB)
+ * - `createTables` MUST be called first before any queries
+ */
+export class SqliteController implements DatabaseController<Uint8Array, Uint8Array> {
+  private status = Status.started;
+
+  private dbSizeMetricInterval?: NodeJS.Timeout;
+
+  constructor(
+    private readonly logger: Logger,
+    private readonly db: DatabaseSync,
+    private metrics: LevelDbControllerMetrics | null
+  ) {
+    this.metrics = metrics ?? null;
+
+    if (this.metrics) {
+      this.collectDbSizeMetric();
+    }
+  }
+
+  static create(opts: DatabaseOptions, {metrics, logger}: SqliteControllerModules): SqliteController {
+    const db = new DatabaseSync(opts.name || "beaconchain");
+
+    // SQLite supports write-ahead log mode (WAL) which dramatically improves performance,
+    // especially in situations with many concurrent readers and a single writer.
+    // It's broadly recommended to enable WAL mode for most typical applications.
+    // see https://bun.sh/docs/api/sqlite#wal-mode
+    db.exec("PRAGMA journal_mode = WAL;");
+
+    return new SqliteController(logger, db, metrics ??
null); + } + + async close(): Promise { + if (this.status === Status.closed) return; + this.status = Status.closed; + + if (this.dbSizeMetricInterval) { + clearInterval(this.dbSizeMetricInterval); + } + + this.db.close(); + } + + createTables(bucketIds: string[]): void { + for (const bucketId of bucketIds) { + this.db.exec(`CREATE TABLE IF NOT EXISTS ${bucketId} (key BLOB PRIMARY KEY, value BLOB)`); + } + } + + /** To inject metrics after CLI initialization */ + setMetrics(metrics: LevelDbControllerMetrics): void { + if (this.metrics !== null) { + throw Error("metrics can only be set once"); + } + + this.metrics = metrics; + if (this.status === Status.started) { + this.collectDbSizeMetric(); + } + } + + async clear(): Promise { + throw new Error("unimplemented"); + } + + async get(key: Uint8Array, opts: DbReqOpts): Promise { + this.metrics?.dbReadReq.inc({bucket: opts.bucketId}, 1); + this.metrics?.dbReadItems.inc({bucket: opts.bucketId}, 1); + + const query = this.db.prepare(`SELECT value from ${opts.bucketId} WHERE key = ?1`); + return (query.get({1: key})?.value ?? null) as Uint8Array | null; + } + + async getMany(key: Uint8Array[], opts: DbReqOpts): Promise<(Uint8Array | undefined)[]> { + this.metrics?.dbReadReq.inc({bucket: opts.bucketId}, 1); + this.metrics?.dbReadItems.inc({bucket: opts.bucketId}, key.length); + + const query = this.db.prepare(`SELECT value from ${opts.bucketId} WHERE key = ?1`); + return key.map((k) => (query.get({1: k})?.value ?? undefined) as Uint8Array | undefined); + } + + async put(key: Uint8Array, value: Uint8Array, opts: DbReqOpts): Promise { + this.metrics?.dbWriteReq.inc({bucket: opts.bucketId}, 1); + this.metrics?.dbWriteItems.inc({bucket: opts.bucketId}, 1); + + const query = this.db.prepare(`INSERT OR REPLACE INTO ${opts.bucketId} VALUES (?1, ?2)`); + query.run({1: key, 2: value}); + } + + async delete(key: Uint8Array, opts: DbReqOpts): Promise { + this.metrics?.dbWriteReq.inc({bucket: opts.bucketId}, 1); + this.metrics?.dbWriteItems.inc({bucket: opts.bucketId}, 1); + + const query = this.db.prepare(`DELETE FROM ${opts.bucketId} WHERE key = ?1`); + query.run({1: key}); + } + + async batchPut(items: KeyValue[], opts: DbReqOpts): Promise { + this.metrics?.dbWriteReq.inc({bucket: opts.bucketId}, 1); + this.metrics?.dbWriteItems.inc({bucket: opts.bucketId}, items.length); + + const query = this.db.prepare(`INSERT OR REPLACE INTO ${opts.bucketId} VALUES (?1, ?2)`); + + // TODO use a single transaction for the batch + for (const {key, value} of items) { + query.run({1: key, 2: value}); + } + } + + async batchDelete(keys: Uint8Array[], opts: DbReqOpts): Promise { + this.metrics?.dbWriteReq.inc({bucket: opts.bucketId}, 1); + this.metrics?.dbWriteItems.inc({bucket: opts.bucketId}, keys.length); + + const query = this.db.prepare(`DELETE FROM ${opts.bucketId} WHERE key = ?1`); + + // TODO use a single transaction for the batch + for (const key of keys) { + query.run({1: key}); + } + } + + keysStream(opts: ControllerFilterOptions): AsyncIterable { + const query = this.db.prepare(`SELECT key from ${opts.bucketId} ${filterOptsToClauses(opts)}`); + const iterator = query.iterate(filterOptsToParams(opts)) as Iterable>; + return this.metricsIterator(iterator, (key) => key.key, opts.bucketId); + } + + valuesStream(opts: ControllerFilterOptions): AsyncIterable { + const query = this.db.prepare(`SELECT value from ${opts.bucketId} ${filterOptsToClauses(opts)}`); + const iterator = query.iterate(filterOptsToParams(opts)) as Iterable>; + return this.metricsIterator(iterator, 
(value) => value.value, opts.bucketId);
+  }
+
+  entriesStream(opts: ControllerFilterOptions<Uint8Array>): AsyncIterable<KeyValue<Uint8Array, Uint8Array>> {
+    const query = this.db.prepare(`SELECT key, value from ${opts.bucketId} ${filterOptsToClauses(opts)}`);
+    const iterator = query.iterate(filterOptsToParams(opts)) as Iterable<KeyValue<Uint8Array, Uint8Array>>;
+    return this.metricsIterator(iterator, (entry) => entry, opts.bucketId);
+  }
+
+  async keys(opts: ControllerFilterOptions<Uint8Array>): Promise<Uint8Array[]> {
+    const query = this.db.prepare(`SELECT key from ${opts.bucketId} ${filterOptsToClauses(opts)}`);
+    const items = query.all(filterOptsToParams(opts)).map((r) => r.key) as Uint8Array[];
+    return this.metricsAll(items, opts.bucketId);
+  }
+
+  async values(opts: ControllerFilterOptions<Uint8Array>): Promise<Uint8Array[]> {
+    const query = this.db.prepare(`SELECT value from ${opts.bucketId} ${filterOptsToClauses(opts)}`);
+    const items = query.all(filterOptsToParams(opts)).map((r) => r.value) as Uint8Array[];
+    return this.metricsAll(items, opts.bucketId);
+  }
+
+  async entries(opts: ControllerFilterOptions<Uint8Array>): Promise<KeyValue<Uint8Array, Uint8Array>[]> {
+    const query = this.db.prepare(`SELECT key, value from ${opts.bucketId} ${filterOptsToClauses(opts)}`);
+    const items = query.all(filterOptsToParams(opts)) as unknown as KeyValue<Uint8Array, Uint8Array>[];
+    return this.metricsAll(items, opts.bucketId);
+  }
+
+  /**
+   * Get the approximate number of bytes of file system space used by the range [start..end).
+   * The result might not include recently written data.
+   */
+  approximateSize(_start: Uint8Array, _end: Uint8Array): Promise<number> {
+    throw new Error("not implemented");
+  }
+
+  /**
+   * Manually trigger a database compaction in the range [start..end].
+   */
+  compactRange(_start: Uint8Array, _end: Uint8Array): Promise<void> {
+    throw new Error("not implemented");
+  }
+
+  /** Capture metrics for db.iterator, db.keys, db.values .all() calls */
+  private metricsAll<T>(items: T[], bucket: string): T[] {
+    this.metrics?.dbReadReq.inc({bucket}, 1);
+    this.metrics?.dbReadItems.inc({bucket}, items.length);
+    return items;
+  }
+
+  /** Capture metrics for db.iterator, db.keys, db.values AsyncIterable calls */
+  private async *metricsIterator<T, K>(
+    iterator: Iterable<T>,
+    getValue: (item: T) => K,
+    bucket: string
+  ): AsyncIterable<K> {
+    this.metrics?.dbReadReq.inc({bucket}, 1);
+
+    let itemsRead = 0;
+
+    for (const item of iterator) {
+      // Count metrics after done condition
+      itemsRead++;
+
+      yield getValue(item);
+    }
+
+    this.metrics?.dbReadItems.inc({bucket}, itemsRead);
+  }
+
+  /** Start interval to capture metric for db size */
+  private collectDbSizeMetric(): void {
+    // TODO implement later
+  }
+
+  /** Capture metric for db size */
+  private dbSizeMetric(): void {
+    // TODO implement later
+  }
+}
+
+// IMPORTANT NOTE: order of opts processing matches filterOptsToParams
+function filterOptsToClauses(opts: FilterOptions<Uint8Array>): string {
+  let clauses = "";
+  let clauseIx = 1;
+  if (opts.gt || opts.gte || opts.lt || opts.lte) {
+    const whereClauses: string[] = [];
+    if (opts.gt) whereClauses.push(`key > ?${clauseIx++}`);
+    if (opts.gte) whereClauses.push(`key >= ?${clauseIx++}`);
+    if (opts.lt) whereClauses.push(`key < ?${clauseIx++}`);
+    if (opts.lte) whereClauses.push(`key <= ?${clauseIx++}`);
+    clauses += `WHERE ${whereClauses.join(" AND ")} `;
+  }
+  if (opts.reverse) {
+    clauses += "ORDER BY key DESC ";
+  }
+  if (opts.limit) {
+    clauses += `LIMIT ${opts.limit} `;
+  }
+  return clauses;
+}
+
+// IMPORTANT NOTE: order of opts processing matches filterOptsToClauses
+function filterOptsToParams(opts: FilterOptions<Uint8Array>): Record<number, Uint8Array> {
+  const params: Record<number, Uint8Array> = {};
+  let clauseIx = 1;
+  if
(opts.gt) {
+    params[clauseIx] = opts.gt;
+    clauseIx++;
+  }
+  if (opts.gte) {
+    params[clauseIx] = opts.gte;
+    clauseIx++;
+  }
+  if (opts.lt) {
+    params[clauseIx] = opts.lt;
+    clauseIx++;
+  }
+  if (opts.lte) {
+    params[clauseIx] = opts.lte;
+    clauseIx++;
+  }
+  return params;
+}
diff --git a/packages/db/src/controller/sqlite_bun.ts b/packages/db/src/controller/sqlite_bun.ts
new file mode 100644
index 000000000000..20b9f21d8a8c
--- /dev/null
+++ b/packages/db/src/controller/sqlite_bun.ts
@@ -0,0 +1,283 @@
+import {Database} from "bun:sqlite";
+import {Logger} from "@lodestar/utils";
+import {
+  ControllerFilterOptions,
+  DatabaseController,
+  DatabaseOptions,
+  DbReqOpts,
+  FilterOptions,
+  KeyValue,
+} from "./interface.js";
+import {LevelDbControllerMetrics} from "./metrics.js";
+
+enum Status {
+  started = "started",
+  closed = "closed",
+}
+
+export type SqliteControllerModules = {
+  logger: Logger;
+  metrics?: LevelDbControllerMetrics | null;
+};
+
+/**
+ * The Bun SQLite implementation of DB
+ *
+ * - `opts.bucketId` is NOT just used for metrics, it is required
+ * - Each bucket is a separate table (key BLOB PRIMARY KEY, value BLOB)
+ * - `createTables` MUST be called first before any queries
+ */
+export class SqliteController implements DatabaseController<Uint8Array, Uint8Array> {
+  private status = Status.started;
+
+  private dbSizeMetricInterval?: NodeJS.Timeout;
+
+  constructor(
+    private readonly logger: Logger,
+    private readonly db: Database,
+    private metrics: LevelDbControllerMetrics | null
+  ) {
+    this.metrics = metrics ?? null;
+
+    if (this.metrics) {
+      this.collectDbSizeMetric();
+    }
+  }
+
+  static create(opts: DatabaseOptions, {metrics, logger}: SqliteControllerModules): SqliteController {
+    const db = new Database(opts.name || "beaconchain", {create: true, strict: true});
+
+    // SQLite supports write-ahead log mode (WAL) which dramatically improves performance,
+    // especially in situations with many concurrent readers and a single writer.
+    // It's broadly recommended to enable WAL mode for most typical applications.
+    // see https://bun.sh/docs/api/sqlite#wal-mode
+    db.run("PRAGMA journal_mode = WAL;");
+
+    return new SqliteController(logger, db, metrics ?? null);
+  }
+
+  async close(): Promise<void> {
+    if (this.status === Status.closed) return;
+    this.status = Status.closed;
+
+    if (this.dbSizeMetricInterval) {
+      clearInterval(this.dbSizeMetricInterval);
+    }
+
+    this.db.close();
+  }
+
+  createTables(bucketIds: string[]): void {
+    // Table names are interpolated into SQL, so bucketIds must always come from the
+    // fixed list of bucket names, never from user input
+    for (const bucketId of bucketIds) {
+      this.db.run(`CREATE TABLE IF NOT EXISTS ${bucketId} (key BLOB PRIMARY KEY, value BLOB)`);
+    }
+  }
+
+  /** To inject metrics after CLI initialization */
+  setMetrics(metrics: LevelDbControllerMetrics): void {
+    if (this.metrics !== null) {
+      throw Error("metrics can only be set once");
+    }
+
+    this.metrics = metrics;
+    if (this.status === Status.started) {
+      this.collectDbSizeMetric();
+    }
+  }
+
+  async clear(): Promise<void> {
+    throw new Error("unimplemented");
+  }
+
+  async get(key: Uint8Array, opts: DbReqOpts): Promise<Uint8Array | null> {
+    this.metrics?.dbReadReq.inc({bucket: opts.bucketId}, 1);
+    this.metrics?.dbReadItems.inc({bucket: opts.bucketId}, 1);
+
+    const query = this.db.query<{value: Uint8Array}, Uint8Array[]>(`SELECT value from ${opts.bucketId} WHERE key = ?1`);
+    return query.get(key)?.value ?? null;
+  }
+
+  async getMany(keys: Uint8Array[], opts: DbReqOpts): Promise<(Uint8Array | undefined)[]> {
+    this.metrics?.dbReadReq.inc({bucket: opts.bucketId}, 1);
+    this.metrics?.dbReadItems.inc({bucket: opts.bucketId}, keys.length);
+
+    const query = this.db.query<{value: Uint8Array}, Uint8Array[]>(`SELECT value from ${opts.bucketId} WHERE key = ?1`);
+    return keys.map((k) => query.get(k)?.value ?? undefined);
+  }
+
+  async put(key: Uint8Array, value: Uint8Array, opts: DbReqOpts): Promise<void> {
+    this.metrics?.dbWriteReq.inc({bucket: opts.bucketId}, 1);
+    this.metrics?.dbWriteItems.inc({bucket: opts.bucketId}, 1);
+
+    const query = this.db.query(`INSERT OR REPLACE INTO ${opts.bucketId} VALUES (?1, ?2)`);
+    query.run(key, value);
+  }
+
+  async delete(key: Uint8Array, opts: DbReqOpts): Promise<void> {
+    this.metrics?.dbWriteReq.inc({bucket: opts.bucketId}, 1);
+    this.metrics?.dbWriteItems.inc({bucket: opts.bucketId}, 1);
+
+    const query = this.db.query(`DELETE FROM ${opts.bucketId} WHERE key = ?1`);
+    query.run(key);
+  }
+
+  async batchPut(items: KeyValue<Uint8Array, Uint8Array>[], opts: DbReqOpts): Promise<void> {
+    this.metrics?.dbWriteReq.inc({bucket: opts.bucketId}, 1);
+    this.metrics?.dbWriteItems.inc({bucket: opts.bucketId}, items.length);
+
+    const query = this.db.query(`INSERT OR REPLACE INTO ${opts.bucketId} VALUES (?1, ?2)`);
+
+    // Run all inserts in a single transaction for atomicity and throughput
+    const batch = this.db.transaction((items: KeyValue<Uint8Array, Uint8Array>[]) => {
+      for (const {key, value} of items) {
+        query.run(key, value);
+      }
+    });
+    batch(items);
+  }
+
+  async batchDelete(keys: Uint8Array[], opts: DbReqOpts): Promise<void> {
+    this.metrics?.dbWriteReq.inc({bucket: opts.bucketId}, 1);
+    this.metrics?.dbWriteItems.inc({bucket: opts.bucketId}, keys.length);
+
+    const query = this.db.query(`DELETE FROM ${opts.bucketId} WHERE key = ?1`);
+
+    const batch = this.db.transaction((keys: Uint8Array[]) => {
+      for (const key of keys) {
+        query.run(key);
+      }
+    });
+    batch(keys);
+  }
+
+  keysStream(opts: ControllerFilterOptions<Uint8Array>): AsyncIterable<Uint8Array> {
+    const query = this.db.query<{key: Uint8Array}, Uint8Array[]>(
+      `SELECT key from ${opts.bucketId} ${filterOptsToClauses(opts)}`
+    );
+    const iterator = query.iterate(...filterOptsToParams(opts));
+    return this.metricsIterator(iterator, (row) => row.key, opts.bucketId);
+  }
+
+  valuesStream(opts: ControllerFilterOptions<Uint8Array>): AsyncIterable<Uint8Array> {
+    const query = this.db.query<{value: Uint8Array}, Uint8Array[]>(
+      `SELECT value from ${opts.bucketId} ${filterOptsToClauses(opts)}`
+    );
+    const iterator = query.iterate(...filterOptsToParams(opts));
+    return this.metricsIterator(iterator, (row) => row.value, opts.bucketId);
+  }
+
+  entriesStream(opts: ControllerFilterOptions<Uint8Array>): AsyncIterable<KeyValue<Uint8Array, Uint8Array>> {
+    const query = this.db.query<{key: Uint8Array; value: Uint8Array}, Uint8Array[]>(
+      `SELECT key, value from ${opts.bucketId} ${filterOptsToClauses(opts)}`
+    );
+    const iterator = query.iterate(...filterOptsToParams(opts));
+    return this.metricsIterator(iterator, (entry) => entry, opts.bucketId);
+  }
+
+  async keys(opts: ControllerFilterOptions<Uint8Array>): Promise<Uint8Array[]> {
+    const query = this.db.query<{key: Uint8Array}, Uint8Array[]>(
+      `SELECT key from ${opts.bucketId} ${filterOptsToClauses(opts)}`
+    );
+    const items = query.values(...filterOptsToParams(opts)).flat() as Uint8Array[];
+    return this.metricsAll(items, opts.bucketId);
+  }
+
+  async values(opts: ControllerFilterOptions<Uint8Array>): Promise<Uint8Array[]> {
+    const query = this.db.query<{value: Uint8Array}, Uint8Array[]>(
+      `SELECT value from ${opts.bucketId} ${filterOptsToClauses(opts)}`
+    );
+    const items = query.values(...filterOptsToParams(opts)).flat() as Uint8Array[];
+    return this.metricsAll(items, opts.bucketId);
+  }
+
+  async entries(opts: ControllerFilterOptions<Uint8Array>): Promise<KeyValue<Uint8Array, Uint8Array>[]> {
+    const query = this.db.query<{key: Uint8Array; value: Uint8Array}, Uint8Array[]>(
+      `SELECT key, value from ${opts.bucketId} ${filterOptsToClauses(opts)}`
+    );
+    const items = query.all(...filterOptsToParams(opts));
+    return this.metricsAll(items, opts.bucketId);
+  }
+
+  /**
+   * Get the approximate number of bytes of file system space used by the range [start..end).
+   * The result might not include recently written data.
+   */
+  approximateSize(_start: Uint8Array, _end: Uint8Array): Promise<number> {
+    throw new Error("not implemented");
+  }
+
+  /**
+   * Manually trigger a database compaction in the range [start..end].
+   */
+  compactRange(_start: Uint8Array, _end: Uint8Array): Promise<void> {
+    throw new Error("not implemented");
+  }
+
+  /** Capture metrics for db.keys, db.values, db.entries .all() calls */
+  private metricsAll<T>(items: T[], bucket: string): T[] {
+    this.metrics?.dbReadReq.inc({bucket}, 1);
+    this.metrics?.dbReadItems.inc({bucket}, items.length);
+    return items;
+  }
+
+  /** Capture metrics for db.keysStream, db.valuesStream, db.entriesStream AsyncIterable calls */
+  private async *metricsIterator<T, K>(
+    iterator: Iterable<T>,
+    getValue: (item: T) => K,
+    bucket: string
+  ): AsyncIterable<K> {
+    this.metrics?.dbReadReq.inc({bucket}, 1);
+
+    let itemsRead = 0;
+
+    for (const item of iterator) {
+      // Count items as they are yielded; report the metric once iteration completes
+      itemsRead++;
+
+      yield getValue(item);
+    }
+
+    this.metrics?.dbReadItems.inc({bucket}, itemsRead);
+  }
+
+  /** Start interval to capture metric for db size */
+  private collectDbSizeMetric(): void {
+    // TODO implement later
+  }
+
+  /** Capture metric for db size */
+  private dbSizeMetric(): void {
+    // TODO implement later
+  }
+}
+
+// IMPORTANT NOTE: order of opts processing matches filterOptsToParams
+function filterOptsToClauses(opts: FilterOptions<Uint8Array>): string {
+  let clauses = "";
+  let clauseIx = 1;
+  if (opts.gt || opts.gte || opts.lt || opts.lte) {
+    const whereClauses: string[] = [];
+    if (opts.gt) whereClauses.push(`key > ?${clauseIx++}`);
+    if (opts.gte) whereClauses.push(`key >= ?${clauseIx++}`);
+    if (opts.lt) whereClauses.push(`key < ?${clauseIx++}`);
+    if (opts.lte) whereClauses.push(`key <= ?${clauseIx++}`);
+    clauses += `WHERE ${whereClauses.join(" AND ")} `;
+  }
+  if (opts.reverse) {
+    clauses += "ORDER BY key DESC ";
+  }
+  if (opts.limit !== undefined) {
+    clauses += `LIMIT ${opts.limit} `;
+  }
+  return clauses;
+}
+
+// IMPORTANT NOTE: order of opts processing matches filterOptsToClauses
+function filterOptsToParams(opts: FilterOptions<Uint8Array>): Uint8Array[] {
+  const params: Uint8Array[] = [];
+  if (opts.gt) params.push(opts.gt);
+  if (opts.gte) params.push(opts.gte);
+  if (opts.lt) params.push(opts.lt);
+  if (opts.lte) params.push(opts.lte);
+  return params;
+}
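Taken together, the controller above is used roughly as follows. This is a minimal sketch, assuming a Bun runtime; the relative import path and the bucket name are illustrative, not part of the patch. Note that `createTables` must run before any query, and that range reads compile to SQL through `filterOptsToClauses`/`filterOptsToParams`:

import {getEnvLogger} from "@lodestar/logger/env";
import {SqliteController} from "./sqlite_bun.js"; // hypothetical import path

const db = SqliteController.create({name: ":memory:"}, {metrics: null, logger: getEnvLogger()});
db.createTables(["example_bucket"]); // one table per bucket, required before any query

const opts = {bucketId: "example_bucket"};
await db.put(Uint8Array.of(1), Uint8Array.of(42), opts);
console.log(await db.get(Uint8Array.of(1), opts)); // Uint8Array [42]

// {gte, lt, reverse, limit} compiles to:
//   SELECT key, value from example_bucket WHERE key >= ?1 AND key < ?2 ORDER BY key DESC LIMIT 10
const entries = await db.entries({gte: Uint8Array.of(0), lt: Uint8Array.of(2), reverse: true, limit: 10, ...opts});
console.log(entries.length); // 1

await db.close();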
Buffer.from("not-existing-key"); - expect(await db.get(key)).toBe(null); + expect(await db.get(key, opts)).toBe(null); }); it("test put/get/delete", async () => { const key = Buffer.from("test"); const value = Buffer.from("some value"); - await db.put(key, value); - expect(await db.get(key)).toEqual(value); - await db.delete(key); - expect(await db.get(key)).toBe(null); + await db.put(key, value, opts); + expect(await db.get(key, opts)).toEqual(value); + await db.delete(key, opts); + expect(await db.get(key, opts)).toBe(null); }); it("test getMany", async () => { const key1 = Buffer.from("test 1"); const value1 = Buffer.from("some value 1"); - await db.put(key1, value1); + await db.put(key1, value1, opts); const key2 = Buffer.from("test 2"); const value2 = Buffer.from("some value 2"); - await db.put(key2, value2); + await db.put(key2, value2, opts); - await expect(db.getMany([key1, key2])).resolves.toEqual([value1, value2]); + await expect(db.getMany([key1, key2], opts)).resolves.toEqual([value1, value2]); await db.delete(key1); - await expect(db.getMany([key1, key2])).resolves.toEqual([undefined, value2]); + await expect(db.getMany([key1, key2], opts)).resolves.toEqual([undefined, value2]); }); it("test batchPut", async () => { @@ -59,12 +60,12 @@ describe("LevelDB controller", () => { value: Buffer.from("value"), }, ]); - expect(await db.get(k1)).not.toBeNull(); - expect(await db.get(k2)).not.toBeNull(); + expect(await db.get(k1, opts)).not.toBeNull(); + expect(await db.get(k2, opts)).not.toBeNull(); }); it("test batch delete", async () => { - await db.batchDelete(await db.keys()); + await db.batchDelete(await db.keys(opts)); const k1 = Buffer.from("test1"); const k2 = Buffer.from("test2"); await db.batchPut([ @@ -77,9 +78,9 @@ describe("LevelDB controller", () => { value: Buffer.from("value"), }, ]); - expect((await db.entries()).length).toBe(2); + expect((await db.entries(opts)).length).toBe(2); await db.batchDelete([k1, k2]); - expect((await db.entries()).length).toBe(0); + expect((await db.entries(opts)).length).toBe(0); }); it("test entries", async () => { diff --git a/packages/db/test/unit/controller/sqlite.test.ts b/packages/db/test/unit/controller/sqlite.test.ts new file mode 100644 index 000000000000..14452a6ac5c7 --- /dev/null +++ b/packages/db/test/unit/controller/sqlite.test.ts @@ -0,0 +1,134 @@ +import all from "it-all"; +import {afterAll, beforeAll, describe, expect, it} from "vitest"; +import {SqliteController} from "#controller/sqlite"; +import {getEnvLogger} from "@lodestar/logger/env"; + +describe("sqlite controller", () => { + const dbLocation = ":memory:"; + const bucketId = "test"; + let db: SqliteController; + + beforeAll(async () => { + db = SqliteController.create({name: dbLocation}, {metrics: null, logger: getEnvLogger()}); + db.createTables([bucketId]); + }); + + afterAll(async () => { + await db.close(); + }); + + it("test get not found", async () => { + const key = Buffer.from("not-existing-key"); + expect(await db.get(key, {bucketId})).toBe(null); + }); + + it("test put/get/delete", async () => { + const key = Buffer.from("test"); + const value = new Uint8Array(Buffer.from("some value")); + await db.put(key, value, {bucketId}); + expect(await db.get(key, {bucketId})).toEqual(value); + await db.delete(key, {bucketId}); + expect(await db.get(key, {bucketId})).toBe(null); + }); + + it("test batchPut", async () => { + const k1 = Buffer.from("test1"); + const k2 = Buffer.from("test2"); + await db.batchPut( + [ + { + key: k1, + value: Buffer.from("value"), + }, + { 
diff --git a/packages/db/test/unit/controller/sqlite.test.ts b/packages/db/test/unit/controller/sqlite.test.ts
new file mode 100644
index 000000000000..14452a6ac5c7
--- /dev/null
+++ b/packages/db/test/unit/controller/sqlite.test.ts
@@ -0,0 +1,134 @@
+import all from "it-all";
+import {afterAll, beforeAll, describe, expect, it} from "vitest";
+import {SqliteController} from "#controller/sqlite";
+import {getEnvLogger} from "@lodestar/logger/env";
+
+describe("sqlite controller", () => {
+  const dbLocation = ":memory:";
+  const bucketId = "test";
+  let db: SqliteController;
+
+  beforeAll(async () => {
+    db = SqliteController.create({name: dbLocation}, {metrics: null, logger: getEnvLogger()});
+    db.createTables([bucketId]);
+  });
+
+  afterAll(async () => {
+    await db.close();
+  });
+
+  it("test get not found", async () => {
+    const key = Buffer.from("not-existing-key");
+    expect(await db.get(key, {bucketId})).toBe(null);
+  });
+
+  it("test put/get/delete", async () => {
+    const key = Buffer.from("test");
+    const value = new Uint8Array(Buffer.from("some value"));
+    await db.put(key, value, {bucketId});
+    expect(await db.get(key, {bucketId})).toEqual(value);
+    await db.delete(key, {bucketId});
+    expect(await db.get(key, {bucketId})).toBe(null);
+  });
+
+  it("test batchPut", async () => {
+    const k1 = Buffer.from("test1");
+    const k2 = Buffer.from("test2");
+    await db.batchPut(
+      [
+        {
+          key: k1,
+          value: Buffer.from("value"),
+        },
+        {
+          key: k2,
+          value: Buffer.from("value"),
+        },
+      ],
+      {bucketId}
+    );
+    expect(await db.get(k1, {bucketId})).not.toBeNull();
+    expect(await db.get(k2, {bucketId})).not.toBeNull();
+  });
+
+  it("test batch delete", async () => {
+    await db.batchDelete(await db.keys({bucketId}), {bucketId});
+    const k1 = Buffer.from("test1");
+    const k2 = Buffer.from("test2");
+    await db.batchPut(
+      [
+        {
+          key: k1,
+          value: Buffer.from("value"),
+        },
+        {
+          key: k2,
+          value: Buffer.from("value"),
+        },
+      ],
+      {bucketId}
+    );
+    expect((await db.entries({bucketId})).length).toBe(2);
+    await db.batchDelete([k1, k2], {bucketId});
+    expect((await db.entries({bucketId})).length).toBe(0);
+  });
+
+  it("test entries", async () => {
+    const k1 = Buffer.from("test1");
+    const k2 = Buffer.from("test2");
+    await db.batchPut(
+      [
+        {
+          key: k1,
+          value: Buffer.from("value"),
+        },
+        {
+          key: k2,
+          value: Buffer.from("value"),
+        },
+      ],
+      {bucketId}
+    );
+    const result = await db.entries({
+      gte: k1,
+      lte: k2,
+      bucketId,
+    });
+    expect(result.length).toBe(2);
+  });
+
+  it("test entriesStream", async () => {
+    const k1 = Buffer.from("test3");
+    const k2 = Buffer.from("test4");
+    await db.batchPut(
+      [
+        {
+          key: k1,
+          value: Buffer.from("value"),
+        },
+        {
+          key: k2,
+          value: Buffer.from("value"),
+        },
+      ],
+      {bucketId}
+    );
+    const resultStream = db.entriesStream({
+      gte: k1,
+      lte: k2,
+      reverse: true,
+      bucketId,
+    });
+    const result = await all(resultStream);
+    expect(result.length).toBe(2);
+
+    const resultStream2 = db.entriesStream({
+      gte: k1,
+      lte: k2,
+      reverse: true,
+      bucketId,
+    });
+    const result2 = await all(resultStream2);
+    expect(result2.length).toBe(2);
+  });
+});
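The entriesStream test exercises the reverse-range path through `filterOptsToClauses`. Outside of tests the stream can also be consumed directly with for await. A minimal sketch, assuming a Bun runtime and the same `#controller/sqlite` subpath import used by the test (keys and values illustrative):

import {getEnvLogger} from "@lodestar/logger/env";
import {SqliteController} from "#controller/sqlite";

const db = SqliteController.create({name: ":memory:"}, {metrics: null, logger: getEnvLogger()});
db.createTables(["test"]);
const opts = {bucketId: "test"};
await db.put(Buffer.from("a"), Buffer.from("1"), opts);
await db.put(Buffer.from("b"), Buffer.from("2"), opts);

// reverse: true becomes ORDER BY key DESC, so "b" is yielded first
for await (const {key, value} of db.entriesStream({gte: Buffer.from("a"), lte: Buffer.from("b"), reverse: true, ...opts})) {
  console.log(Buffer.from(key).toString(), Buffer.from(value).toString()); // "b 2" then "a 1"
}
await db.close();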
diff --git a/yarn.lock b/yarn.lock
index 31858f42245f..41ffad265909 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -2138,6 +2138,41 @@
     uint8arraylist "^2.4.8"
     uint8arrays "^5.1.0"
 
+"@lmdb/lmdb-darwin-arm64@3.4.3":
+  version "3.4.3"
+  resolved "https://registry.yarnpkg.com/@lmdb/lmdb-darwin-arm64/-/lmdb-darwin-arm64-3.4.3.tgz#dc344d7c5af6355ce4c739533971c0dcd9e3ce7c"
+  integrity sha512-zR6Y45VNtW5s+A+4AyhrJk0VJKhXdkLhrySCpCu7PSdnakebsOzNxf58p5Xoq66vOSuueGAxlqDAF49HwdrSTQ==
+
+"@lmdb/lmdb-darwin-x64@3.4.3":
+  version "3.4.3"
+  resolved "https://registry.yarnpkg.com/@lmdb/lmdb-darwin-x64/-/lmdb-darwin-x64-3.4.3.tgz#4657af3c309f41038a312cddc16bf9b2b8e5092c"
+  integrity sha512-nfGm5pQksBGfaj9uMbjC0YyQreny/Pl7mIDtHtw6g7WQuCgeLullr9FNRsYyKplaEJBPrCVpEjpAznxTBIrXBw==
+
+"@lmdb/lmdb-linux-arm64@3.4.3":
+  version "3.4.3"
+  resolved "https://registry.yarnpkg.com/@lmdb/lmdb-linux-arm64/-/lmdb-linux-arm64-3.4.3.tgz#6bd3ec6707e489a8863054c77c99575492cbd662"
+  integrity sha512-uX9eaPqWb740wg5D3TCvU/js23lSRSKT7lJrrQ8IuEG/VLgpPlxO3lHDywU44yFYdGS7pElBn6ioKFKhvALZlw==
+
+"@lmdb/lmdb-linux-arm@3.4.3":
+  version "3.4.3"
+  resolved "https://registry.yarnpkg.com/@lmdb/lmdb-linux-arm/-/lmdb-linux-arm-3.4.3.tgz#7c417f96eaeadd860ff310860b7e918bb19915d3"
+  integrity sha512-Kjqomp7i0rgSbYSUmv9JnXpS55zYT/YcW3Bdf9oqOTjcH0/8tFAP8MLhu/i9V2pMKIURDZk63Ww49DTK0T3c/Q==
+
+"@lmdb/lmdb-linux-x64@3.4.3":
+  version "3.4.3"
+  resolved "https://registry.yarnpkg.com/@lmdb/lmdb-linux-x64/-/lmdb-linux-x64-3.4.3.tgz#231836e23e8d8f53532a47b5756b8096abb1dc57"
+  integrity sha512-7/8l20D55CfwdMupkc3fNxNJdn4bHsti2X0cp6PwiXlLeSFvAfWs5kCCx+2Cyje4l4GtN//LtKWjTru/9hDJQg==
+
+"@lmdb/lmdb-win32-arm64@3.4.3":
+  version "3.4.3"
+  resolved "https://registry.yarnpkg.com/@lmdb/lmdb-win32-arm64/-/lmdb-win32-arm64-3.4.3.tgz#d2b91a43b5ca3cb30af941a222b7eeeef433ac6e"
+  integrity sha512-yWVR0e5Gl35EGJBsAuqPOdjtUYuN8CcTLKrqpQFoM+KsMadViVCulhKNhkcjSGJB88Am5bRPjMro4MBB9FS23Q==
+
+"@lmdb/lmdb-win32-x64@3.4.3":
+  version "3.4.3"
+  resolved "https://registry.yarnpkg.com/@lmdb/lmdb-win32-x64/-/lmdb-win32-x64-3.4.3.tgz#bb4fbcdddfbb4683ecf7bb28045414bfed90afb0"
+  integrity sha512-1JdBkcO0Vrua4LUgr4jAe4FUyluwCeq/pDkBrlaVjX3/BBWP1TzVjCL+TibWNQtPAL1BITXPAhlK5Ru4FBd/hg==
+
 "@lodestar/bun@git+https://github.com/ChainSafe/lodestar-bun.git":
   version "0.1.0"
   resolved "git+https://github.com/ChainSafe/lodestar-bun.git#af71ca31b147fcce8516c41e0206e44f4120430b"
@@ -2192,6 +2227,36 @@
   resolved "https://registry.yarnpkg.com/@microsoft/tsdoc/-/tsdoc-0.15.1.tgz#d4f6937353bc4568292654efb0a0e0532adbcba2"
   integrity sha512-4aErSrCR/On/e5G2hDP0wjooqDdauzEbIq8hIkIe5pXV0rtWJZvdCEKL0ykZxex+IxIwBp0eGeV48hQN07dXtw==
 
+"@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3":
+  version "3.0.3"
+  resolved "https://registry.yarnpkg.com/@msgpackr-extract/msgpackr-extract-darwin-arm64/-/msgpackr-extract-darwin-arm64-3.0.3.tgz#9edec61b22c3082018a79f6d1c30289ddf3d9d11"
+  integrity sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw==
+
+"@msgpackr-extract/msgpackr-extract-darwin-x64@3.0.3":
+  version "3.0.3"
+  resolved "https://registry.yarnpkg.com/@msgpackr-extract/msgpackr-extract-darwin-x64/-/msgpackr-extract-darwin-x64-3.0.3.tgz#33677a275204898ad8acbf62734fc4dc0b6a4855"
+  integrity sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw==
+
+"@msgpackr-extract/msgpackr-extract-linux-arm64@3.0.3":
+  version "3.0.3"
+  resolved "https://registry.yarnpkg.com/@msgpackr-extract/msgpackr-extract-linux-arm64/-/msgpackr-extract-linux-arm64-3.0.3.tgz#19edf7cdc2e7063ee328403c1d895a86dd28f4bb"
+  integrity sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg==
+
+"@msgpackr-extract/msgpackr-extract-linux-arm@3.0.3":
+  version "3.0.3"
+  resolved "https://registry.yarnpkg.com/@msgpackr-extract/msgpackr-extract-linux-arm/-/msgpackr-extract-linux-arm-3.0.3.tgz#94fb0543ba2e28766c3fc439cabbe0440ae70159"
+  integrity sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw==
+
+"@msgpackr-extract/msgpackr-extract-linux-x64@3.0.3":
+  version "3.0.3"
+  resolved "https://registry.yarnpkg.com/@msgpackr-extract/msgpackr-extract-linux-x64/-/msgpackr-extract-linux-x64-3.0.3.tgz#4a0609ab5fe44d07c9c60a11e4484d3c38bbd6e3"
+  integrity sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg==
+
+"@msgpackr-extract/msgpackr-extract-win32-x64@3.0.3":
+  version "3.0.3"
+  resolved "https://registry.yarnpkg.com/@msgpackr-extract/msgpackr-extract-win32-x64/-/msgpackr-extract-win32-x64-3.0.3.tgz#0aa5502d547b57abfc4ac492de68e2006e417242"
+  integrity sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ==
+
 "@mswjs/interceptors@^0.39.1":
   version "0.39.2"
   resolved "https://registry.yarnpkg.com/@mswjs/interceptors/-/interceptors-0.39.2.tgz#de9de0ab23f99d387c7904df7219a92157d1d666"
@@ -5801,6 +5866,11 @@ detect-indent@^5.0.0:
   resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-5.0.0.tgz#3871cc0a6a002e8c3e5b3cf7f336264675f06b9d"
   integrity sha512-rlpvsxUtM0PQvy9iZe640/IWwWYyBsTApREbA1pHOpmOUIl9MkP/U4z7vTtg4Oaojvqhxt7sdufnT0EzGaR31g==
 
+detect-libc@^2.0.1:
+  version "2.1.2"
+  resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-2.1.2.tgz#689c5dcdc1900ef5583a4cb9f6d7b473742074ad"
+  integrity sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==
+
 detect-node@^2.0.4:
   version "2.1.0"
   resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.1.0.tgz#c9c70775a49c3d03bc2c06d9a73be550f978f8b1"
@@ -5840,16 +5910,6 @@ dir-glob@^3.0.1:
   dependencies:
     path-type "^4.0.0"
 
-dns-over-http-resolver@^2.1.1:
-  version "2.1.3"
-  resolved "https://registry.yarnpkg.com/dns-over-http-resolver/-/dns-over-http-resolver-2.1.3.tgz#bb7f2e10cc18d960339a6e30e21b8c1d99be7b38"
-  integrity sha512-zjRYFhq+CsxPAouQWzOsxNMvEN+SHisjzhX8EMxd2Y0EG3thvn6wXQgMJLnTDImkhe4jhLbOQpXtL10nALBOSA==
-  dependencies:
-    debug "^4.3.1"
-    native-fetch "^4.0.2"
-    receptacle "^1.3.2"
-    undici "^5.12.0"
-
 dns-packet@^5.2.2:
   version "5.6.0"
   resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-5.6.0.tgz#2202c947845c7a63c23ece58f2f70ff6ab4c2f7d"
@@ -6014,7 +6074,20 @@ electron@^26.2.2:
     "@types/node" "^18.11.18"
     extract-zip "^2.0.1"
 
-elliptic@6.5.4, elliptic@>=6.6.1, elliptic@^6.5.3, elliptic@^6.5.4:
+elliptic@6.5.4:
+  version "6.5.4"
+  resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.4.tgz#da37cebd31e79a1367e941b592ed1fbebd58abbb"
+  integrity sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==
+  dependencies:
+    bn.js "^4.11.9"
+    brorand "^1.1.0"
+    hash.js "^1.0.0"
+    hmac-drbg "^1.0.1"
+    inherits "^2.0.4"
+    minimalistic-assert "^1.0.1"
+    minimalistic-crypto-utils "^1.0.1"
+
+elliptic@^6.5.3, elliptic@^6.5.4:
   version "6.6.1"
   resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.6.1.tgz#3b8ffb02670bf69e382c7f65bf524c97c5405c06"
   integrity sha512-RaddvvMatK2LJHqFJ+YA4WysVN5Ita9E35botqIYspQ4TkRAlCicdzKOjlyv/1Za5RyTNn7di//eEV0uTAfe3g==
@@ -6865,11 +6938,6 @@ get-caller-file@^2.0.5:
   resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e"
   integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==
 
-get-func-name@^2.0.1:
-  version "2.0.2"
-  resolved "https://registry.yarnpkg.com/get-func-name/-/get-func-name-2.0.2.tgz#0d7cf20cd13fda808669ffa88f4ffc7a3943fc41"
-  integrity sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==
-
 get-intrinsic@^1.0.2, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3, get-intrinsic@^1.2.0, get-intrinsic@^1.2.1, get-intrinsic@^1.2.2:
   version "1.2.2"
   resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.2.tgz#281b7622971123e1ef4b3c90fd7539306da93f3b"
@@ -8680,6 +8748,25 @@ lines-and-columns@~2.0.3:
   resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-2.0.3.tgz#b2f0badedb556b747020ab8ea7f0373e22efac1b"
   integrity sha512-cNOjgCnLB+FnvWWtyRTzmB3POJ+cXxTA81LoW7u8JdmhfXzriropYwpjShnz1QLLWsQwY7nIxoDmcPTwphDK9w==
 
+lmdb@^3.4.3:
+  version "3.4.3"
+  resolved "https://registry.yarnpkg.com/lmdb/-/lmdb-3.4.3.tgz#47b1875ff67efce752c10bf892b563c1307cf0b3"
+  integrity sha512-GWV1kVi6uhrXWqe+3NXWO73OYe8fto6q8JMo0HOpk1vf8nEyFWgo4CSNJpIFzsOxOrysVUlcO48qRbQfmKd1gA==
+  dependencies:
+    msgpackr "^1.11.2"
+    node-addon-api "^6.1.0"
+    node-gyp-build-optional-packages "5.2.2"
+    ordered-binary "^1.5.3"
+    weak-lru-cache "^1.2.2"
+  optionalDependencies:
+    "@lmdb/lmdb-darwin-arm64" "3.4.3"
+    "@lmdb/lmdb-darwin-x64" "3.4.3"
+    "@lmdb/lmdb-linux-arm" "3.4.3"
+    "@lmdb/lmdb-linux-arm64" "3.4.3"
+    "@lmdb/lmdb-linux-x64" "3.4.3"
+    "@lmdb/lmdb-win32-arm64" "3.4.3"
+    "@lmdb/lmdb-win32-x64" "3.4.3"
+
 load-json-file@6.2.0:
   version "6.2.0"
   resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-6.2.0.tgz#5c7770b42cafa97074ca2848707c61662f4251a1"
@@ -8862,12 +8949,10 @@ loglevel@^1.6.0:
   resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.9.2.tgz#c2e028d6c757720107df4e64508530db6621ba08"
   integrity sha512-HgMmCqIJSAKqo68l0rS2AanEWfkxaZ5wNiEFb5ggm08lDs9Xl2KxBlX3PTcaD2chBM1gXAYf491/M2Rv8Jwayg==
 
-loupe@^2.3.6, loupe@^3.1.0, loupe@^3.1.2, loupe@^3.1.3:
-  version "2.3.7"
-  resolved "https://registry.yarnpkg.com/loupe/-/loupe-2.3.7.tgz#6e69b7d4db7d3ab436328013d37d1c8c3540c697"
-  integrity sha512-zSMINGVYkdpYSOBmLi0D1Uo7JU9nVdQKrHxC8eYlV+9YKK9WePqAlL7lSlorG/U2Fw1w0hTBmaa/jrQ3UbPHtA==
-  dependencies:
-    get-func-name "^2.0.1"
+loupe@^3.1.0, loupe@^3.1.2, loupe@^3.1.3:
+  version "3.2.1"
+  resolved "https://registry.yarnpkg.com/loupe/-/loupe-3.2.1.tgz#0095cf56dc5b7a9a7c08ff5b1a8796ec8ad17e76"
+  integrity sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ==
 
 lower-case@^2.0.2:
   version "2.0.2"
@@ -9414,6 +9499,27 @@ ms@^3.0.0-canary.1:
   resolved "https://registry.yarnpkg.com/ms/-/ms-3.0.0-canary.1.tgz#c7b34fbce381492fd0b345d1cf56e14d67b77b80"
   integrity sha512-kh8ARjh8rMN7Du2igDRO9QJnqCb2xYTJxyQYK7vJJS4TvLLmsbyhiKpSW+t+y26gyOyMd0riphX0GeWKU3ky5g==
 
+msgpackr-extract@^3.0.2:
+  version "3.0.3"
+  resolved "https://registry.yarnpkg.com/msgpackr-extract/-/msgpackr-extract-3.0.3.tgz#e9d87023de39ce714872f9e9504e3c1996d61012"
+  integrity sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA==
+  dependencies:
+    node-gyp-build-optional-packages "5.2.2"
+  optionalDependencies:
+    "@msgpackr-extract/msgpackr-extract-darwin-arm64" "3.0.3"
+    "@msgpackr-extract/msgpackr-extract-darwin-x64" "3.0.3"
+    "@msgpackr-extract/msgpackr-extract-linux-arm" "3.0.3"
+    "@msgpackr-extract/msgpackr-extract-linux-arm64" "3.0.3"
+    "@msgpackr-extract/msgpackr-extract-linux-x64" "3.0.3"
+    "@msgpackr-extract/msgpackr-extract-win32-x64" "3.0.3"
+
+msgpackr@^1.11.2:
+  version "1.11.5"
+  resolved "https://registry.yarnpkg.com/msgpackr/-/msgpackr-1.11.5.tgz#edf0b9d9cb7d8ed6897dd0e42cfb865a2f4b602e"
+  integrity sha512-UjkUHN0yqp9RWKy0Lplhh+wlpdt9oQBYgULZOiFhV3VclSF1JnSQWZ5r9gORQlNYaUKQoR8itv7g7z1xDDuACA==
+  optionalDependencies:
+    msgpackr-extract "^3.0.2"
+
 msw@^2.7.3:
   version "2.10.2"
   resolved "https://registry.yarnpkg.com/msw/-/msw-2.10.2.tgz#e7a56ed0b6865b00a30b4c4a5b59e5388fd48315"
@@ -9492,11 +9598,6 @@ nan@^2.16.0, nan@^2.17.0:
   resolved "https://registry.yarnpkg.com/nan/-/nan-2.20.0.tgz#08c5ea813dd54ed16e5bd6505bf42af4f7838ca3"
   integrity sha512-bk3gXBZDGILuuo/6sKtr0DQmSThYHLtNCdSdXk9YkxD/jK6X2vmCyyXBBxyqZ4XcnzTyYEAThfX3DCEnLf6igw==
 
-nan@^2.19.0:
-  version "2.23.0"
-  resolved "https://registry.yarnpkg.com/nan/-/nan-2.23.0.tgz#24aa4ddffcc37613a2d2935b97683c1ec96093c6"
-  integrity sha512-1UxuyYGdoQHcGg87Lkqm3FzefucTa0NAiOcuRsDmysep3c1LVCRK2krrUDafMWtjSG04htvAmvg96+SDknOmgQ==
-
 nanoid@^3.3.8:
   version "3.3.8"
   resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.8.tgz#b1be3030bee36aaff18bacb375e5cce521684baf"
@@ -9507,11 +9608,6 @@ napi-macros@^2.2.2:
   resolved "https://registry.yarnpkg.com/napi-macros/-/napi-macros-2.2.2.tgz#817fef20c3e0e40a963fbf7b37d1600bd0201044"
   integrity sha512-hmEVtAGYzVQpCKdbQea4skABsdXW4RUh5t5mJ2zzqowJS2OyXZTU1KhDVFhx+NlWZ4ap9mqR9TcDO3LTTttd+g==
 
-native-fetch@^4.0.2:
-  version "4.0.2"
-  resolved "https://registry.yarnpkg.com/native-fetch/-/native-fetch-4.0.2.tgz#75c8a44c5f3bb021713e5e24f2846750883e49af"
-  integrity sha512-4QcVlKFtv2EYVS5MBgsGX5+NWKtbDbIECdUXDBGDMAZXq3Jkv9zf+y8iS7Ub8fEdga3GpYeazp9gauNqXHJOCg==
-
 negotiator@^0.6.3:
   version "0.6.3"
   resolved "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz"
@@ -9545,6 +9641,11 @@ node-addon-api@^3.2.1:
   resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-3.2.1.tgz#81325e0a2117789c0128dab65e7e38f07ceba161"
   integrity sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A==
 
+node-addon-api@^6.1.0:
+  version "6.1.0"
+  resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-6.1.0.tgz#ac8470034e58e67d0c6f1204a18ae6995d9c0d76"
+  integrity sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA==
+
 node-domexception@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5"
@@ -9573,6 +9674,13 @@ node-fetch@^3.3.2:
     fetch-blob "^3.1.4"
     formdata-polyfill "^4.0.10"
 
+node-gyp-build-optional-packages@5.2.2:
+  version "5.2.2"
+  resolved "https://registry.yarnpkg.com/node-gyp-build-optional-packages/-/node-gyp-build-optional-packages-5.2.2.tgz#522f50c2d53134d7f3a76cd7255de4ab6c96a3a4"
+  integrity sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw==
+  dependencies:
+    detect-libc "^2.0.1"
+
 node-gyp-build@^4.3.0:
   version "4.5.0"
   resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.5.0.tgz#7a64eefa0b21112f89f58379da128ac177f20e40"
@@ -10014,6 +10122,11 @@ ora@^6.1.2:
    strip-ansi "^7.0.1"
    wcwidth "^1.0.1"
 
+ordered-binary@^1.5.3:
+  version "1.6.0"
+  resolved "https://registry.yarnpkg.com/ordered-binary/-/ordered-binary-1.6.0.tgz#9c490dadc0b1336ca6917d8d41dd474b8c0bff32"
+  integrity sha512-IQh2aMfMIDbPjI/8a3Edr+PiOpcsB7yo8NdW7aHWVaoR/pcDldunMvnnwbk/auPGqmKeAdxtZl7MHX/QmPwhvQ==
+
 os-browserify@^0.3.0:
   version "0.3.0"
   resolved "https://registry.npmjs.org/os-browserify/-/os-browserify-0.3.0.tgz"
@@ -10951,13 +11064,6 @@ real-require@^0.2.0:
   resolved "https://registry.yarnpkg.com/real-require/-/real-require-0.2.0.tgz#209632dea1810be2ae063a6ac084fee7e33fba78"
   integrity sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg==
 
-receptacle@^1.3.2:
-  version "1.3.2"
-  resolved "https://registry.yarnpkg.com/receptacle/-/receptacle-1.3.2.tgz#a7994c7efafc7a01d0e2041839dab6c4951360d2"
-  integrity sha512-HrsFvqZZheusncQRiEE7GatOAETrARKV/lnfYicIm8lbvp/JQOdADOfhjBd2DajvoszEyxSM6RlAAIZgEoeu/A==
-  dependencies:
-    ms "^2.1.1"
-
 redent@^3.0.0:
   version "3.0.0"
   resolved "https://registry.yarnpkg.com/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f"
@@ -12600,13 +12706,6 @@ undici-types@~6.21.0:
   resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-6.21.0.tgz#691d00af3909be93a7faa13be61b3a5b50ef12cb"
   integrity sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==
 
-undici@^5.12.0:
-  version "5.29.0"
-  resolved "https://registry.yarnpkg.com/undici/-/undici-5.29.0.tgz#419595449ae3f2cdcba3580a2e8903399bd1f5a3"
-  integrity sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==
-  dependencies:
-    "@fastify/busboy" "^2.0.0"
-
 undici@^5.25.4:
   version "5.28.5"
   resolved "https://registry.yarnpkg.com/undici/-/undici-5.28.5.tgz#b2b94b6bf8f1d919bc5a6f31f2c01deb02e54d4b"
@@ -12923,6 +13022,11 @@ wcwidth@^1.0.0, wcwidth@^1.0.1:
   dependencies:
     defaults "^1.0.3"
 
+weak-lru-cache@^1.2.2:
+  version "1.2.2"
+  resolved "https://registry.yarnpkg.com/weak-lru-cache/-/weak-lru-cache-1.2.2.tgz#fdbb6741f36bae9540d12f480ce8254060dccd19"
+  integrity sha512-DEAoo25RfSYMuTGc9vPJzZcZullwIqRDSI9LOy+fkCJPi6hykCnfKaXTuPBDuXAUcqHXyOgFtHNp/kB2FjYHbw==
+
 weald@^1.0.4:
   version "1.0.4"
   resolved "https://registry.yarnpkg.com/weald/-/weald-1.0.4.tgz#8858cf9186869deba58357ae10cf26eaada80bb0"