diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 5e79f2d6..5ea9979e 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -31,8 +31,8 @@ jobs:
   coverage:
     name: test
-    #runs-on: ubuntu-latest
-    runs-on: [self-hosted, linux, x64]
+    runs-on: ubuntu-latest
+    #runs-on: [self-hosted, linux, x64]
     steps:
       - name: Checkout repository
         uses: actions/checkout@v3
@@ -51,8 +51,8 @@
       - name: Setup Build env
         run: |
           ROOT_DIR=`pwd`
-          #sudo apt-get install protobuf-compiler -y
-          #yarn global add arlocal_db3
+          sudo apt-get install protobuf-compiler -y
+          yarn global add arlocal_db3
           cd ${ROOT_DIR}/metadata && yarn install
           cd ${ROOT_DIR}/metadata && npx hardhat test
           test -e ${ROOT_DIR}/metadata/artifacts/contracts/DB3MetaStore.sol/DB3MetaStore.json && cp -f ${ROOT_DIR}/metadata/artifacts/contracts/DB3MetaStore.sol/DB3MetaStore.json ${ROOT_DIR}/abi/
diff --git a/Cargo.toml b/Cargo.toml
index 36cb07a0..fca698ef 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -10,7 +10,6 @@ members = [
     "src/sdk",
     "src/event"
 ]
-
 [workspace.dependencies]
 fastcrypto = {git = "https://github.com/MystenLabs/fastcrypto", rev = "306465d4fe04f6c26359d885f3b0a548b661de40"}
 ethers = {git="https://github.com/imotai/ethers-rs", rev="d526191b7972e8cf4412fee8b71cbf42e0ce7995"}
diff --git a/sdk/src/index.ts b/sdk/src/index.ts
index 904cd043..b2f161ad 100644
--- a/sdk/src/index.ts
+++ b/sdk/src/index.ts
@@ -31,7 +31,13 @@ export type {
     MutationResult,
     QueryResult,
 } from './store/types'
-export { addDoc, updateDoc, deleteDoc, queryDoc, getDoc } from './store/document_v2'
+export {
+    addDoc,
+    updateDoc,
+    deleteDoc,
+    queryDoc,
+    getDoc,
+} from './store/document_v2'
 export { SystemConfig, SystemStatus, Version } from './proto/db3_base'
 export {
diff --git a/sdk/src/store/document_v2.ts b/sdk/src/store/document_v2.ts
index 9c9baef6..b4e84990 100644
--- a/sdk/src/store/document_v2.ts
+++ b/sdk/src/store/document_v2.ts
@@ -225,6 +225,13 @@ export async function updateDoc(
     }
 }
 
+/**
+ * Add a document to the collection.
+ *
+ * @param col The collection to add the document to.
+ * @param doc The document to add.
+ * @returns The ID of the newly added document.
+ */
 export async function addDoc(col: Collection, doc: DocumentData) {
     const documentMutation: DocumentMutation = {
         collectionName: col.name,
@@ -250,6 +257,7 @@ export async function addDoc(col: Collection, doc: DocumentData) {
         payload,
         col.db.client.nonce.toString()
     )
+
     if (response.code == 0 && response.items.length > 0) {
         col.db.client.nonce += 1
         return {
@@ -259,6 +267,8 @@
             id: response.items[0].value,
         }
     } else {
-        throw new Error('fail to create collection')
+        throw new Error(
+            'fail to addDoc, maybe you can syncAccountNonce to resolve the problem'
+        )
     }
 }
diff --git a/sdk/tests/client_v2.test.ts b/sdk/tests/client_v2.test.ts
index 0452fe85..894bb85f 100644
--- a/sdk/tests/client_v2.test.ts
+++ b/sdk/tests/client_v2.test.ts
@@ -36,7 +36,7 @@ import {
     deleteDoc,
     updateDoc,
     queryDoc,
-    getDoc
+    getDoc,
 } from '../src/store/document_v2'
 import {
     createFromPrivateKey,
@@ -383,7 +383,7 @@ describe('test db3.js client module', () => {
             try {
                 const doc = await getDoc(collection, 1000000000000)
                 except(1).toBe(0)
-            } catch(e) {}
+            } catch (e) {}
         }
 
         {
diff --git a/src/node/src/rollup_executor.rs b/src/node/src/rollup_executor.rs
index 2956eeaf..597e0f7b 100644
--- a/src/node/src/rollup_executor.rs
+++ b/src/node/src/rollup_executor.rs
@@ -270,12 +270,15 @@ impl RollupExecutor {
             return Ok(());
         }
         let now = Instant::now();
+
         info!(
             "the next rollup start block {} and the newest block {current_block}",
             last_end_block
         );
+
         self.pending_start_block
             .store(last_end_block, Ordering::Relaxed);
+
         self.pending_end_block
             .store(current_block, Ordering::Relaxed);
         let mutations = self
@@ -305,6 +308,7 @@
             self.pending_data_size.store(0, Ordering::Relaxed);
             self.pending_mutations.store(0, Ordering::Relaxed);
         }
+
         let (id, reward, num_rows, size) = ar_toolbox
             .compress_and_upload_record_batch(
                 tx,
@@ -318,6 +322,7 @@
         let (evm_cost, tx_hash) = meta_store
             .update_rollup_step(id.as_str(), network_id)
             .await?;
+
         let tx_str = format!("0x{}", hex::encode(tx_hash.as_bytes()));
         info!("the process rollup done with num mutations {num_rows}, raw data size {memory_size}, compress data size {size} and processed time {} id {} ar cost {} and evm tx {} and cost {}",
             now.elapsed().as_secs(),
@@ -325,6 +330,7 @@
             tx_str.as_str(),
             evm_cost.as_u64()
         );
+
         let record = RollupRecord {
             end_block: current_block,
             raw_data_size: memory_size as u64,
@@ -338,6 +344,7 @@
             evm_tx: tx_str,
             evm_cost: evm_cost.as_u64(),
         };
+
         self.storage
             .add_rollup_record(&record)
             .map_err(|e| DB3Error::RollupError(format!("{e}")))?;
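A minimal caller-side sketch of the new addDoc failure path, not part of this patch: it assumes the package is consumed as 'db3.js', that it also exports the syncAccountNonce helper referenced by the new error message, and the addDocWithNonceRetry wrapper name is purely illustrative.

// Sketch only: the error added above points at a stale local nonce, so refresh
// it from the node and retry once. Assumes 'db3.js' exports syncAccountNonce
// next to addDoc; adjust the import to wherever your build exposes it.
import { addDoc, syncAccountNonce } from 'db3.js'

async function addDocWithNonceRetry(
    collection: Parameters<typeof addDoc>[0],
    doc: Parameters<typeof addDoc>[1]
) {
    try {
        return await addDoc(collection, doc)
    } catch (e) {
        // The collection keeps a reference to its client (see col.db.client above),
        // so resync that client's nonce before the retry.
        await syncAccountNonce(collection.db.client)
        return await addDoc(collection, doc)
    }
}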