@@ -10,8 +10,9 @@ use ryhope::{
     storage::{updatetree::UpdateTree, WideLineage},
     Epoch,
 };
+use serde::{Deserialize, Serialize};
 use verifiable_db::query::{
-    batching::{NodePath, RowInput, TreePathInputs},
+    api::{NodePath, RowInput, TreePathInputs},
     computational_hash_ids::ColumnIDs,
     universal_circuit::universal_circuit_inputs::{ColumnCell, RowCells},
 };
@@ -118,54 +119,62 @@ async fn generate_chunks<const CHUNK_SIZE: usize, C: ContextProvider>(
         .cloned()
         .collect::<BTreeSet<_>>();
 
-    Ok(stream::iter(sorted_index_values.into_iter())
-        .then(async |index_value| {
-            let index_path = index_cache
-                .compute_path(&index_value, current_epoch)
+    let prove_rows = async |index_value| {
+        let index_path = index_cache
+            .compute_path(&index_value, current_epoch)
+            .await
+            .unwrap_or_else(|| panic!("node with key {index_value} not found in index tree cache"));
+        let proven_rows = if let Some(matching_rows) =
+            row_keys_by_epochs.get(&(index_value as Epoch))
+        {
+            let sorted_rows = matching_rows.iter().collect::<BTreeSet<_>>();
+            stream::iter(sorted_rows.iter())
+                .then(async |&row_key| {
+                    compute_input_for_row(&row_cache, row_key, index_value, &index_path, column_ids)
+                        .await
+                })
+                .collect::<Vec<RowInput>>()
+                .await
+        } else {
+            let proven_node = non_existence_inputs
+                .find_row_node_for_non_existence(index_value)
                 .await
-                .unwrap_or_else(|| {
-                    panic!("node with key {index_value} not found in index tree cache")
+                .unwrap_or_else(|_| {
+                    panic!("node for non-existence not found for index value {index_value}")
                 });
-            let proven_rows =
-                if let Some(matching_rows) = row_keys_by_epochs.get(&(index_value as Epoch)) {
-                    let sorted_rows = matching_rows.iter().collect::<BTreeSet<_>>();
-                    stream::iter(sorted_rows.iter())
-                        .then(async |&row_key| {
-                            compute_input_for_row(
-                                &row_cache,
-                                row_key,
-                                index_value,
-                                &index_path,
-                                column_ids,
-                            )
-                            .await
-                        })
-                        .collect::<Vec<RowInput>>()
-                        .await
-                } else {
-                    let proven_node = non_existence_inputs
-                        .find_row_node_for_non_existence(index_value)
-                        .await
-                        .unwrap_or_else(|_| {
-                            panic!("node for non-existence not found for index value {index_value}")
-                        });
-                    let row_input = compute_input_for_row(
-                        non_existence_inputs.row_tree,
-                        &proven_node,
-                        index_value,
-                        &index_path,
-                        column_ids,
-                    )
-                    .await;
-                    vec![row_input]
-                };
-            proven_rows
-        })
-        .concat()
-        .await
+            let row_input = compute_input_for_row(
+                non_existence_inputs.row_tree,
+                &proven_node,
+                index_value,
+                &index_path,
+                column_ids,
+            )
+            .await;
+            vec![row_input]
+        };
+        proven_rows
+    };
+
+    // TODO: This implementation causes an error in DQ:
+    // `implementation of `std::marker::Send` is not general enough`
+    /*
+    let chunks = stream::iter(sorted_index_values.into_iter())
+        .then(prove_rows)
+        .concat()
+        .await
+    */
+    let mut chunks = vec![];
+    for index_value in sorted_index_values {
+        let chunk = prove_rows(index_value).await;
+        chunks.extend(chunk);
+    }
+
+    let chunks = chunks
         .chunks(CHUNK_SIZE)
         .map(|chunk| chunk.to_vec())
-        .collect_vec())
+        .collect_vec();
+
+    Ok(chunks)
 }
 
 /// Key for nodes of the `UTForChunks<NUM_CHUNKS>` employed to
@@ -195,8 +204,10 @@ async fn generate_chunks<const CHUNK_SIZE: usize, C: ContextProvider>(
 ///
 /// (2,0) (2,1) (2,2) (2,3) (2,4)
 /// ```
-#[derive(Clone, Debug, Hash, Eq, PartialEq, Default)]
-pub struct UTKey<const ARITY: usize>((usize, usize));
+#[derive(
+    Clone, Copy, Debug, Default, PartialEq, PartialOrd, Ord, Eq, Hash, Serialize, Deserialize,
+)]
+pub struct UTKey<const ARITY: usize>(pub (usize, usize));
 
 impl<const ARITY: usize> UTKey<ARITY> {
     /// Compute the key of the child node of `self` that has `num_left_siblings`
@@ -318,15 +329,13 @@ impl<const ARITY: usize> ProvingTree<ARITY> {
         let num_childrens = parent_node.children_keys.len();
         let new_child_key = parent_key.children_key(num_childrens);
         let child_node = ProvingTreeNode {
-            parent_key: Some(parent_key.clone()),
+            parent_key: Some(*parent_key),
             children_keys: vec![],
         };
         // insert new child in the set of children of the parent
-        parent_node.children_keys.push(new_child_key.clone());
+        parent_node.children_keys.push(new_child_key);
         assert!(
-            self.nodes
-                .insert(new_child_key.clone(), child_node)
-                .is_none(),
+            self.nodes.insert(new_child_key, child_node).is_none(),
             "Node with key {:?} already found in the tree",
             new_child_key
         );
@@ -339,7 +348,7 @@ impl<const ARITY: usize> ProvingTree<ARITY> {
         };
         let root_key = UTKey((0, 0));
         assert!(
-            self.nodes.insert(root_key.clone(), root).is_none(),
+            self.nodes.insert(root_key, root).is_none(),
             "Error: root node inserted multiple times"
         );
         root_key
@@ -412,7 +421,7 @@ impl<const ARITY: usize> ProvingTree<ARITY> {
         while node_key.is_some() {
             // place node key in the path
             let key = node_key.unwrap();
-            path.push(key.clone());
+            path.push(*key);
             // fetch key of the parent node, if any
             node_key = self
                 .nodes
@@ -449,7 +458,7 @@ impl<const NUM_CHUNKS: usize> UTForChunksBuilder<NUM_CHUNKS> {
                 let path = tree.compute_path_for_leaf(node_index);
                 (
                     (
-                        path.last().unwrap().clone(), // chunk node is always a leaf of the tree, so it is the last node
+                        *path.last().unwrap(), // chunk node is always a leaf of the tree, so it is the last node
                         // in the path
                         chunk,
                     ),
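Context on the TODO in the `generate_chunks` hunk above: the commented-out pipeline drives the `prove_rows` async closure through `stream::iter(..).then(..).concat()`, which in the downstream caller trips rustc's "implementation of `std::marker::Send` is not general enough" error, so the commit falls back to awaiting each index value sequentially in a plain `for` loop. The sketch below only illustrates the shape of that workaround under simplified assumptions: `prove_rows` here is a hypothetical stand-in (a plain `async fn`, not the repository's closure capturing caches), and the example does not reproduce the `Send` inference failure, which depends on the real closure's captured borrows. It compiles on its own with just the `futures` crate.

```rust
use futures::stream::{self, StreamExt};

// Hypothetical stand-in for the real `prove_rows` closure from the diff above;
// it just fabricates some output per index value.
async fn prove_rows(index_value: u64) -> Vec<u64> {
    vec![index_value * 10, index_value * 10 + 1]
}

// Combinator form matching the commented-out code: the whole pipeline is one
// future whose `Send`-ness rustc must infer through `then(..).concat()`.
async fn collect_with_stream(sorted_index_values: Vec<u64>) -> Vec<u64> {
    stream::iter(sorted_index_values)
        .then(prove_rows)
        .concat()
        .await
}

// Sequential form used by the commit: await each call directly and extend an
// accumulator, keeping the generated future's auto traits straightforward.
async fn collect_with_loop(sorted_index_values: Vec<u64>) -> Vec<u64> {
    let mut chunks = vec![];
    for index_value in sorted_index_values {
        chunks.extend(prove_rows(index_value).await);
    }
    chunks
}

fn main() {
    let values = vec![1, 2, 3];
    let (a, b) = futures::executor::block_on(async {
        (
            collect_with_stream(values.clone()).await,
            collect_with_loop(values).await,
        )
    });
    // Both forms produce the same flattened result.
    assert_eq!(a, b);
}
```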