From 312d3ad05ccc8dad86d6c5c9144dfac7eda2381a Mon Sep 17 00:00:00 2001
From: ananas-block
Date: Sat, 31 May 2025 00:14:28 +0100
Subject: [PATCH] chore: remove outdated Merkle tree crates

---
 concurrent/Cargo.lock       | 2575 --------------------------
 concurrent/Cargo.toml       |   38 -
 concurrent/src/changelog.rs |  140 --
 concurrent/src/copy.rs      |  231 ---
 concurrent/src/errors.rs    |   72 -
 concurrent/src/event.rs     |   87 -
 concurrent/src/hash.rs      |   41 -
 concurrent/src/lib.rs       |  702 ------
 concurrent/src/zero_copy.rs |  369 ----
 concurrent/tests/tests.rs   | 3421 -----------------------------
 hash-set/Cargo.lock         | 1875 -------------------
 hash-set/Cargo.toml         |   26 -
 hash-set/src/lib.rs         | 1167 ------------
 hash-set/src/zero_copy.rs   |  239 ---
 indexed/Cargo.lock          | 1944 --------------------
 indexed/Cargo.toml          |   35 -
 indexed/src/array.rs        | 1255 -------------
 indexed/src/changelog.rs    |   16 -
 indexed/src/copy.rs         |  206 ---
 indexed/src/errors.rs       |   66 -
 indexed/src/lib.rs          |  530 ------
 indexed/src/reference.rs    |  211 ---
 indexed/src/zero_copy.rs    |  343 ----
 indexed/tests/tests.rs      |  960 ----------
 scripts/lint.sh             |    4 +-
 25 files changed, 2 insertions(+), 16551 deletions(-)
 delete mode 100644 concurrent/Cargo.lock
 delete mode 100644 concurrent/Cargo.toml
 delete mode 100644 concurrent/src/changelog.rs
 delete mode 100644 concurrent/src/copy.rs
 delete mode 100644 concurrent/src/errors.rs
 delete mode 100644 concurrent/src/event.rs
 delete mode 100644 concurrent/src/hash.rs
 delete mode 100644 concurrent/src/lib.rs
 delete mode 100644 concurrent/src/zero_copy.rs
 delete mode 100644 concurrent/tests/tests.rs
 delete mode 100644 hash-set/Cargo.lock
 delete mode 100644 hash-set/Cargo.toml
 delete mode 100644 hash-set/src/lib.rs
 delete mode 100644 hash-set/src/zero_copy.rs
 delete mode 100644 indexed/Cargo.lock
 delete mode 100644 indexed/Cargo.toml
 delete mode 100644 indexed/src/array.rs
 delete mode 100644 indexed/src/changelog.rs
 delete mode 100644 indexed/src/copy.rs
 delete mode 100644 indexed/src/errors.rs
 delete mode 100644 indexed/src/lib.rs
 delete mode 100644 indexed/src/reference.rs
 delete mode 100644 indexed/src/zero_copy.rs
 delete mode 100644 indexed/tests/tests.rs

diff --git a/concurrent/Cargo.lock b/concurrent/Cargo.lock
deleted file mode 100644
index c74b5b4..0000000
--- a/concurrent/Cargo.lock
+++ /dev/null
@@ -1,2575 +0,0 @@
-# This file is automatically @generated by Cargo.
-# It is not intended for manual editing.
-version = 4 - -[[package]] -name = "addr2line" -version = "0.24.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" -dependencies = [ - "gimli", -] - -[[package]] -name = "adler2" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" - -[[package]] -name = "ahash" -version = "0.7.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" -dependencies = [ - "getrandom 0.2.15", - "once_cell", - "version_check", -] - -[[package]] -name = "ahash" -version = "0.8.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" -dependencies = [ - "cfg-if", - "once_cell", - "version_check", - "zerocopy", -] - -[[package]] -name = "anchor-attribute-access-control" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5f619f1d04f53621925ba8a2e633ba5a6081f2ae14758cbb67f38fd823e0a3e" -dependencies = [ - "anchor-syn", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-attribute-account" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7f2a3e1df4685f18d12a943a9f2a7456305401af21a07c9fe076ef9ecd6e400" -dependencies = [ - "anchor-syn", - "bs58 0.5.1", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-attribute-constant" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9423945cb55627f0b30903288e78baf6f62c6c8ab28fb344b6b25f1ffee3dca7" -dependencies = [ - "anchor-syn", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-attribute-error" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93ed12720033cc3c3bf3cfa293349c2275cd5ab99936e33dd4bf283aaad3e241" -dependencies = [ - "anchor-syn", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-attribute-event" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eef4dc0371eba2d8c8b54794b0b0eb786a234a559b77593d6f80825b6d2c77a2" -dependencies = [ - "anchor-syn", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-attribute-program" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b18c4f191331e078d4a6a080954d1576241c29c56638783322a18d308ab27e4f" -dependencies = [ - "anchor-syn", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-derive-accounts" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5de10d6e9620d3bcea56c56151cad83c5992f50d5960b3a9bebc4a50390ddc3c" -dependencies = [ - "anchor-syn", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-derive-serde" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4e2e5be518ec6053d90a2a7f26843dbee607583c779e6c8395951b9739bdfbe" -dependencies = [ - "anchor-syn", - "borsh-derive-internal 0.10.4", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-derive-space" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ecc31d19fa54840e74b7a979d44bcea49d70459de846088a1d71e87ba53c419" 
-dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-lang" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35da4785497388af0553586d55ebdc08054a8b1724720ef2749d313494f2b8ad" -dependencies = [ - "anchor-attribute-access-control", - "anchor-attribute-account", - "anchor-attribute-constant", - "anchor-attribute-error", - "anchor-attribute-event", - "anchor-attribute-program", - "anchor-derive-accounts", - "anchor-derive-serde", - "anchor-derive-space", - "arrayref", - "base64 0.13.1", - "bincode", - "borsh 0.10.4", - "bytemuck", - "getrandom 0.2.15", - "solana-program 1.18.22", - "thiserror", -] - -[[package]] -name = "anchor-syn" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9101b84702fed2ea57bd22992f75065da5648017135b844283a2f6d74f27825" -dependencies = [ - "anyhow", - "bs58 0.5.1", - "heck", - "proc-macro2", - "quote", - "serde", - "serde_json", - "sha2 0.10.8", - "syn 1.0.109", - "thiserror", -] - -[[package]] -name = "anyhow" -version = "1.0.94" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7" - -[[package]] -name = "ark-bn254" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a22f4561524cd949590d78d7d4c5df8f592430d221f7f3c9497bbafd8972120f" -dependencies = [ - "ark-ec", - "ark-ff", - "ark-std", -] - -[[package]] -name = "ark-ec" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "defd9a439d56ac24968cca0571f598a61bc8c55f71d50a89cda591cb750670ba" -dependencies = [ - "ark-ff", - "ark-poly", - "ark-serialize", - "ark-std", - "derivative", - "hashbrown 0.13.2", - "itertools", - "num-traits", - "zeroize", -] - -[[package]] -name = "ark-ff" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec847af850f44ad29048935519032c33da8aa03340876d351dfab5660d2966ba" -dependencies = [ - "ark-ff-asm", - "ark-ff-macros", - "ark-serialize", - "ark-std", - "derivative", - "digest 0.10.7", - "itertools", - "num-bigint", - "num-traits", - "paste", - "rustc_version", - "zeroize", -] - -[[package]] -name = "ark-ff-asm" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed4aa4fe255d0bc6d79373f7e31d2ea147bcf486cba1be5ba7ea85abdb92348" -dependencies = [ - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-ff-macros" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565" -dependencies = [ - "num-bigint", - "num-traits", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-poly" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d320bfc44ee185d899ccbadfa8bc31aab923ce1558716e1997a1e74057fe86bf" -dependencies = [ - "ark-ff", - "ark-serialize", - "ark-std", - "derivative", - "hashbrown 0.13.2", -] - -[[package]] -name = "ark-serialize" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adb7b85a02b83d2f22f89bd5cac66c9c89474240cb6207cb1efc16d098e822a5" -dependencies = [ - "ark-serialize-derive", - "ark-std", - "digest 0.10.7", - "num-bigint", -] - -[[package]] -name = "ark-serialize-derive" -version = "0.4.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae3281bc6d0fd7e549af32b52511e1302185bd688fd3359fa36423346ff682ea" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-std" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" -dependencies = [ - "num-traits", - "rand 0.8.5", -] - -[[package]] -name = "arrayref" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" - -[[package]] -name = "arrayvec" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" - -[[package]] -name = "autocfg" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" - -[[package]] -name = "backtrace" -version = "0.3.74" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" -dependencies = [ - "addr2line", - "cfg-if", - "libc", - "miniz_oxide", - "object", - "rustc-demangle", - "windows-targets", -] - -[[package]] -name = "base64" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" - -[[package]] -name = "base64" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" - -[[package]] -name = "base64" -version = "0.21.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" - -[[package]] -name = "base64" -version = "0.22.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" - -[[package]] -name = "bincode" -version = "1.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" -dependencies = [ - "serde", -] - -[[package]] -name = "bitflags" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" -dependencies = [ - "serde", -] - -[[package]] -name = "bitmaps" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" -dependencies = [ - "typenum", -] - -[[package]] -name = "blake3" -version = "1.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8ee0c1824c4dea5b5f81736aff91bae041d2c07ee1192bec91054e10e3e601e" -dependencies = [ - "arrayref", - "arrayvec", - "cc", - "cfg-if", - "constant_time_eq", - "digest 0.10.7", -] - -[[package]] -name = "block-buffer" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" -dependencies = [ - "generic-array", -] - -[[package]] -name = "block-buffer" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" -dependencies = [ - "generic-array", -] - -[[package]] -name = "borsh" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15bf3650200d8bffa99015595e10f1fbd17de07abbc25bb067da79e769939bfa" -dependencies = [ - "borsh-derive 0.9.3", - "hashbrown 0.11.2", -] - -[[package]] -name = "borsh" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "115e54d64eb62cdebad391c19efc9dce4981c690c85a33a12199d99bb9546fee" -dependencies = [ - "borsh-derive 0.10.4", - "hashbrown 0.13.2", -] - -[[package]] -name = "borsh" -version = "1.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2506947f73ad44e344215ccd6403ac2ae18cd8e046e581a441bf8d199f257f03" -dependencies = [ - "borsh-derive 1.5.3", - "cfg_aliases", -] - -[[package]] -name = "borsh-derive" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6441c552f230375d18e3cc377677914d2ca2b0d36e52129fe15450a2dce46775" -dependencies = [ - "borsh-derive-internal 0.9.3", - "borsh-schema-derive-internal 0.9.3", - "proc-macro-crate 0.1.5", - "proc-macro2", - "syn 1.0.109", -] - -[[package]] -name = "borsh-derive" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "831213f80d9423998dd696e2c5345aba6be7a0bd8cd19e31c5243e13df1cef89" -dependencies = [ - "borsh-derive-internal 0.10.4", - "borsh-schema-derive-internal 0.10.4", - "proc-macro-crate 0.1.5", - "proc-macro2", - "syn 1.0.109", -] - -[[package]] -name = "borsh-derive" -version = "1.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2593a3b8b938bd68373196c9832f516be11fa487ef4ae745eb282e6a56a7244" -dependencies = [ - "once_cell", - "proc-macro-crate 3.2.0", - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "borsh-derive-internal" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5449c28a7b352f2d1e592a8a28bf139bc71afb0764a14f3c02500935d8c44065" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "borsh-derive-internal" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65d6ba50644c98714aa2a70d13d7df3cd75cd2b523a2b452bf010443800976b3" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "borsh-schema-derive-internal" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdbd5696d8bfa21d53d9fe39a714a18538bad11492a42d066dbbc395fb1951c0" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "borsh-schema-derive-internal" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "276691d96f063427be83e6692b86148e488ebba9f48f77788724ca027ba3b6d4" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "bs58" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "771fe0050b883fcc3ea2359b1a96bcfbc090b7116eae7c3c512c7a083fdf23d3" - -[[package]] -name = "bs58" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf88ba1141d185c399bee5288d850d63b8369520c1eafc32a0430b5b6c287bf4" -dependencies = [ - "tinyvec", -] - -[[package]] -name = "bumpalo" -version = "3.16.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" - -[[package]] -name = "bv" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8834bb1d8ee5dc048ee3124f2c7c1afcc6bc9aed03f11e9dfd8c69470a5db340" -dependencies = [ - "feature-probe", - "serde", -] - -[[package]] -name = "bytemuck" -version = "1.21.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef657dfab802224e671f5818e9a4935f9b1957ed18e58292690cc39e7a4092a3" -dependencies = [ - "bytemuck_derive", -] - -[[package]] -name = "bytemuck_derive" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fa76293b4f7bb636ab88fd78228235b5248b4d05cc589aed610f954af5d7c7a" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "byteorder" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" - -[[package]] -name = "cc" -version = "1.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c31a0499c1dc64f458ad13872de75c0eb7e3fdb0e67964610c914b034fc5956e" -dependencies = [ - "jobserver", - "libc", - "shlex", -] - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "cfg_aliases" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" - -[[package]] -name = "console_error_panic_hook" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" -dependencies = [ - "cfg-if", - "wasm-bindgen", -] - -[[package]] -name = "console_log" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89f72f65e8501878b8a004d5a1afb780987e2ce2b4532c562e367a72c57499f" -dependencies = [ - "log", - "web-sys", -] - -[[package]] -name = "constant_time_eq" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" - -[[package]] -name = "cpufeatures" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" -dependencies = [ - "libc", -] - -[[package]] -name = "crossbeam-deque" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" -dependencies = [ - "crossbeam-epoch", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.9.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-utils" -version = "0.8.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" - -[[package]] -name = "crunchy" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" - -[[package]] -name = "crypto-common" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" -dependencies = [ - "generic-array", - "typenum", -] - -[[package]] -name = "crypto-mac" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" -dependencies = [ - "generic-array", - "subtle", -] - -[[package]] -name = "curve25519-dalek" -version = "3.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90f9d052967f590a76e62eb387bd0bbb1b000182c3cefe5364db6b7211651bc0" -dependencies = [ - "byteorder", - "digest 0.9.0", - "rand_core 0.5.1", - "serde", - "subtle", - "zeroize", -] - -[[package]] -name = "curve25519-dalek" -version = "4.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be" -dependencies = [ - "cfg-if", - "cpufeatures", - "curve25519-dalek-derive", - "digest 0.10.7", - "fiat-crypto", - "rand_core 0.6.4", - "rustc_version", - "subtle", - "zeroize", -] - -[[package]] -name = "curve25519-dalek-derive" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "derivative" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "digest" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" -dependencies = [ - "generic-array", -] - -[[package]] -name = "digest" -version = "0.10.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" -dependencies = [ - "block-buffer 0.10.4", - "crypto-common", - "subtle", -] - -[[package]] -name = "either" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" - -[[package]] -name = "equivalent" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" - -[[package]] -name = "feature-probe" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "835a3dc7d1ec9e75e2b5fb4ba75396837112d2060b03f7d43bc1897c7f7211da" - -[[package]] -name = "fiat-crypto" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" - -[[package]] -name = "five8_const" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b4f62f0f8ca357f93ae90c8c2dd1041a1f665fde2f889ea9b1787903829015" -dependencies = [ - "five8_core", -] - -[[package]] -name = "five8_core" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"94474d15a76982be62ca8a39570dccce148d98c238ebb7408b0a21b2c4bdddc4" - -[[package]] -name = "generic-array" -version = "0.14.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" -dependencies = [ - "serde", - "typenum", - "version_check", -] - -[[package]] -name = "getrandom" -version = "0.1.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" -dependencies = [ - "cfg-if", - "js-sys", - "libc", - "wasi 0.9.0+wasi-snapshot-preview1", - "wasm-bindgen", -] - -[[package]] -name = "getrandom" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" -dependencies = [ - "cfg-if", - "js-sys", - "libc", - "wasi 0.11.0+wasi-snapshot-preview1", - "wasm-bindgen", -] - -[[package]] -name = "gimli" -version = "0.31.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" - -[[package]] -name = "hashbrown" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" -dependencies = [ - "ahash 0.7.8", -] - -[[package]] -name = "hashbrown" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" -dependencies = [ - "ahash 0.8.11", -] - -[[package]] -name = "hashbrown" -version = "0.15.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" - -[[package]] -name = "heck" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" -dependencies = [ - "unicode-segmentation", -] - -[[package]] -name = "hmac" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "126888268dcc288495a26bf004b38c5fdbb31682f992c84ceb046a1f0fe38840" -dependencies = [ - "crypto-mac", - "digest 0.9.0", -] - -[[package]] -name = "hmac-drbg" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17ea0a1394df5b6574da6e0c1ade9e78868c9fb0a4e5ef4428e32da4676b85b1" -dependencies = [ - "digest 0.9.0", - "generic-array", - "hmac", -] - -[[package]] -name = "im" -version = "15.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0acd33ff0285af998aaf9b57342af478078f53492322fafc47450e09397e0e9" -dependencies = [ - "bitmaps", - "rand_core 0.6.4", - "rand_xoshiro", - "rayon", - "serde", - "sized-chunks", - "typenum", - "version_check", -] - -[[package]] -name = "indexmap" -version = "2.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" -dependencies = [ - "equivalent", - "hashbrown 0.15.2", -] - -[[package]] -name = "itertools" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", -] - -[[package]] -name = "itoa" -version = "1.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" - -[[package]] -name = "jobserver" -version = "0.1.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" -dependencies = [ - "libc", -] - -[[package]] -name = "js-sys" -version = "0.3.76" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6717b6b5b077764fb5966237269cb3c64edddde4b14ce42647430a78ced9e7b7" -dependencies = [ - "once_cell", - "wasm-bindgen", -] - -[[package]] -name = "keccak" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" -dependencies = [ - "cpufeatures", -] - -[[package]] -name = "lazy_static" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" - -[[package]] -name = "libc" -version = "0.2.169" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" - -[[package]] -name = "libsecp256k1" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9d220bc1feda2ac231cb78c3d26f27676b8cf82c96971f7aeef3d0cf2797c73" -dependencies = [ - "arrayref", - "base64 0.12.3", - "digest 0.9.0", - "hmac-drbg", - "libsecp256k1-core", - "libsecp256k1-gen-ecmult", - "libsecp256k1-gen-genmult", - "rand 0.7.3", - "serde", - "sha2 0.9.9", - "typenum", -] - -[[package]] -name = "libsecp256k1-core" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0f6ab710cec28cef759c5f18671a27dae2a5f952cdaaee1d8e2908cb2478a80" -dependencies = [ - "crunchy", - "digest 0.9.0", - "subtle", -] - -[[package]] -name = "libsecp256k1-gen-ecmult" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccab96b584d38fac86a83f07e659f0deafd0253dc096dab5a36d53efe653c5c3" -dependencies = [ - "libsecp256k1-core", -] - -[[package]] -name = "libsecp256k1-gen-genmult" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67abfe149395e3aa1c48a2beb32b068e2334402df8181f818d3aee2b304c4f5d" -dependencies = [ - "libsecp256k1-core", -] - -[[package]] -name = "light-bounded-vec" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47ced86d6f1b163a04d5d0be44f8bbeedb11d32f73af27812bbd144e0f1f1a42" -dependencies = [ - "bytemuck", - "memoffset", - "solana-program 1.18.22", - "thiserror", -] - -[[package]] -name = "light-concurrent-merkle-tree" -version = "1.1.0" -dependencies = [ - "ark-bn254", - "ark-ff", - "borsh 0.10.4", - "bytemuck", - "light-bounded-vec", - "light-hash-set", - "light-hasher", - "light-merkle-tree-reference", - "light-utils", - "memoffset", - "num-bigint", - "num-traits", - "rand 0.8.5", - "solana-program 1.18.22", - "spl-account-compression", - "spl-concurrent-merkle-tree 0.2.0", - "thiserror", - "tokio", -] - -[[package]] -name = "light-hash-set" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7cd392ed4df05a545dfb8a58ef72639fbba9dee7a0605b81c3370d02161932b" -dependencies = [ - "light-bounded-vec", - "light-heap", - "light-utils", - "memoffset", - "num-bigint", - "num-traits", - "solana-program 1.18.22", - "thiserror", -] - -[[package]] -name = "light-hasher" 
-version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e932ed98282fa564ff4518416de688593a0f425c81d68cfa70e98da21a17a36f" -dependencies = [ - "ark-bn254", - "light-poseidon", - "sha2 0.10.8", - "sha3", - "solana-program 1.18.22", - "thiserror", -] - -[[package]] -name = "light-heap" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7304b9ef6e32c540b685fb3cb13081db831b8f4ea03d1d5d54491dee19100eb5" -dependencies = [ - "anchor-lang", -] - -[[package]] -name = "light-merkle-tree-reference" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb9153fd97f1bdba3ec1de6f4c4f20134c6e5e1285676bcb9ef5ebe493f41afa" -dependencies = [ - "light-bounded-vec", - "light-hasher", - "log", - "thiserror", -] - -[[package]] -name = "light-poseidon" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c9a85a9752c549ceb7578064b4ed891179d20acd85f27318573b64d2d7ee7ee" -dependencies = [ - "ark-bn254", - "ark-ff", - "num-bigint", - "thiserror", -] - -[[package]] -name = "light-utils" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19e2b5a4959cb0456b483a20b4f3930920949137c00e76b5d0f9bf8d701a3c6a" -dependencies = [ - "anyhow", - "ark-bn254", - "ark-ff", - "light-bounded-vec", - "num-bigint", - "rand 0.8.5", - "solana-program 1.18.22", - "thiserror", -] - -[[package]] -name = "lock_api" -version = "0.4.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" -dependencies = [ - "autocfg", - "scopeguard", -] - -[[package]] -name = "log" -version = "0.4.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" - -[[package]] -name = "memchr" -version = "2.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" - -[[package]] -name = "memmap2" -version = "0.5.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83faa42c0a078c393f6b29d5db232d8be22776a891f8f56e5284faee4a20b327" -dependencies = [ - "libc", -] - -[[package]] -name = "memoffset" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" -dependencies = [ - "autocfg", -] - -[[package]] -name = "miniz_oxide" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ffbe83022cedc1d264172192511ae958937694cd57ce297164951b8b3568394" -dependencies = [ - "adler2", -] - -[[package]] -name = "num-bigint" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" -dependencies = [ - "num-integer", - "num-traits", - "rand 0.8.5", -] - -[[package]] -name = "num-derive" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "num-integer" -version = "0.1.46" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" 
-dependencies = [ - "num-traits", -] - -[[package]] -name = "num-traits" -version = "0.2.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" -dependencies = [ - "autocfg", -] - -[[package]] -name = "object" -version = "0.36.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" -dependencies = [ - "memchr", -] - -[[package]] -name = "once_cell" -version = "1.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" - -[[package]] -name = "opaque-debug" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" - -[[package]] -name = "parking_lot" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" -dependencies = [ - "lock_api", - "parking_lot_core", -] - -[[package]] -name = "parking_lot_core" -version = "0.9.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" -dependencies = [ - "cfg-if", - "libc", - "redox_syscall", - "smallvec", - "windows-targets", -] - -[[package]] -name = "paste" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" - -[[package]] -name = "pbkdf2" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "216eaa586a190f0a738f2f918511eecfa90f13295abec0e457cdebcceda80cbd" -dependencies = [ - "crypto-mac", -] - -[[package]] -name = "pin-project-lite" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" - -[[package]] -name = "ppv-lite86" -version = "0.2.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" -dependencies = [ - "zerocopy", -] - -[[package]] -name = "proc-macro-crate" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785" -dependencies = [ - "toml", -] - -[[package]] -name = "proc-macro-crate" -version = "3.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" -dependencies = [ - "toml_edit", -] - -[[package]] -name = "proc-macro2" -version = "1.0.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "quote" -version = "1.0.37" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "rand" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" -dependencies = [ - "getrandom 0.1.16", - "libc", - "rand_chacha 
0.2.2", - "rand_core 0.5.1", - "rand_hc", -] - -[[package]] -name = "rand" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" -dependencies = [ - "libc", - "rand_chacha 0.3.1", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_chacha" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" -dependencies = [ - "ppv-lite86", - "rand_core 0.5.1", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_core" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" -dependencies = [ - "getrandom 0.1.16", -] - -[[package]] -name = "rand_core" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" -dependencies = [ - "getrandom 0.2.15", -] - -[[package]] -name = "rand_hc" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" -dependencies = [ - "rand_core 0.5.1", -] - -[[package]] -name = "rand_xoshiro" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa" -dependencies = [ - "rand_core 0.6.4", -] - -[[package]] -name = "rayon" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" -dependencies = [ - "either", - "rayon-core", -] - -[[package]] -name = "rayon-core" -version = "1.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" -dependencies = [ - "crossbeam-deque", - "crossbeam-utils", -] - -[[package]] -name = "redox_syscall" -version = "0.5.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" -dependencies = [ - "bitflags", -] - -[[package]] -name = "rustc-demangle" -version = "0.1.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" - -[[package]] -name = "rustc-hash" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" - -[[package]] -name = "rustc_version" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" -dependencies = [ - "semver", -] - -[[package]] -name = "rustversion" -version = "1.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" - -[[package]] -name = "ryu" -version = "1.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" - -[[package]] -name = "scopeguard" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" - -[[package]] -name = "semver" -version = "1.0.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cb6eb87a131f756572d7fb904f6e7b68633f09cca868c5df1c4b8d1a694bbba" - -[[package]] -name = "serde" -version = "1.0.216" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b9781016e935a97e8beecf0c933758c97a5520d32930e460142b4cd80c6338e" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_bytes" -version = "0.11.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "387cc504cb06bb40a96c8e04e951fe01854cf6bc921053c954e4a606d9675c6a" -dependencies = [ - "serde", -] - -[[package]] -name = "serde_derive" -version = "1.0.216" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "serde_json" -version = "1.0.134" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d00f4175c42ee48b15416f6193a959ba3a0d67fc699a0db9ad12df9f83991c7d" -dependencies = [ - "itoa", - "memchr", - "ryu", - "serde", -] - -[[package]] -name = "sha2" -version = "0.9.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" -dependencies = [ - "block-buffer 0.9.0", - "cfg-if", - "cpufeatures", - "digest 0.9.0", - "opaque-debug", -] - -[[package]] -name = "sha2" -version = "0.10.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest 0.10.7", -] - -[[package]] -name = "sha3" -version = "0.10.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" -dependencies = [ - "digest 0.10.7", - "keccak", -] - -[[package]] -name = "shlex" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" - -[[package]] -name = "sized-chunks" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e" -dependencies = [ - "bitmaps", - "typenum", -] - -[[package]] -name = "smallvec" -version = "1.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" - -[[package]] -name = "solana-account-info" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42051fa2def3a2d9123f0e33b62a983b25c13d153a30e707b14d3c3b79a91592" -dependencies = [ - "bincode", - "serde", - "solana-program-error", - "solana-program-memory", - "solana-pubkey", -] - -[[package]] -name = "solana-atomic-u64" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10dad9cf8527bbf74d3668505f563bd362e2e14d0fc77338d20973e881bbad0b" -dependencies = [ - "parking_lot", -] - -[[package]] -name = "solana-bincode" -version = 
"2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e3b178d3783809a2480c542cd7c57c06e1bab2c0f21562fcb8cd13eabd0138e" -dependencies = [ - "bincode", - "serde", - "solana-instruction", -] - -[[package]] -name = "solana-borsh" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "870197ea4929500d8e6f624c3eb578912b5063bbd5c8bfbe87396cd5b4257465" -dependencies = [ - "borsh 0.10.4", - "borsh 1.5.3", -] - -[[package]] -name = "solana-clock" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b26a68f89972fddb370ba33a49340bd3419da529893d9ee851211588aee811fa" -dependencies = [ - "serde", - "serde_derive", - "solana-sdk-macro 2.1.6", - "solana-sysvar-id", -] - -[[package]] -name = "solana-cpi" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e3b62e1ca838f92b90c25ab68c297272cee8e2256dad18806a219b05cfcd247" -dependencies = [ - "solana-account-info", - "solana-define-syscall", - "solana-instruction", - "solana-program-error", - "solana-pubkey", - "solana-stable-layout", -] - -[[package]] -name = "solana-decode-error" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1323dbdb7c8ca717bcd7987a3f45619b5b6517dc3ee22a21342122a5516125c3" -dependencies = [ - "num-traits", -] - -[[package]] -name = "solana-define-syscall" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a23cf0144176f94019a27ce46372661f67007232eea16cae96cb985fc25131d5" - -[[package]] -name = "solana-epoch-schedule" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c60dde3613fcd1af91c2033e67ffe8c8d2bcd58085c53c842fa7903fa839ad38" -dependencies = [ - "serde", - "serde_derive", - "solana-sdk-macro 2.1.6", - "solana-sysvar-id", -] - -[[package]] -name = "solana-fee-calculator" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c706f3d151d0abc197ca2fcecf877ace03d613be6fae766de12f5fb41c96b04" -dependencies = [ - "log", - "serde", - "serde_derive", -] - -[[package]] -name = "solana-frozen-abi" -version = "1.18.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20a6ef2db80dceb124b7bf81cca3300804bf427d2711973fc3df450ed7dfb26d" -dependencies = [ - "block-buffer 0.10.4", - "bs58 0.4.0", - "bv", - "either", - "generic-array", - "im", - "lazy_static", - "log", - "memmap2", - "rustc_version", - "serde", - "serde_bytes", - "serde_derive", - "sha2 0.10.8", - "solana-frozen-abi-macro", - "subtle", - "thiserror", -] - -[[package]] -name = "solana-frozen-abi-macro" -version = "1.18.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70088de7d4067d19a7455609e2b393e6086bd847bb39c4d2bf234fc14827ef9e" -dependencies = [ - "proc-macro2", - "quote", - "rustc_version", - "syn 2.0.90", -] - -[[package]] -name = "solana-hash" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a86aabbd7ebf807689a0355f053d6dc31d2131c2d83613011a374a18cc5d61b7" -dependencies = [ - "borsh 1.5.3", - "bs58 0.5.1", - "bytemuck", - "bytemuck_derive", - "js-sys", - "serde", - "serde_derive", - "solana-atomic-u64", - "solana-sanitize", - "wasm-bindgen", -] - -[[package]] -name = "solana-instruction" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"94ed5710c998efd09ffb596cf5e7266c11cd56e3a136c8a1f940e8525fd5be6e" -dependencies = [ - "bincode", - "borsh 1.5.3", - "getrandom 0.2.15", - "js-sys", - "num-traits", - "serde", - "serde_derive", - "solana-define-syscall", - "solana-pubkey", - "wasm-bindgen", -] - -[[package]] -name = "solana-last-restart-slot" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd5647af0980c796c942e33f1f7dbffca29b7747630b720e6975abb1d7c531f6" -dependencies = [ - "serde", - "serde_derive", - "solana-sdk-macro 2.1.6", - "solana-sysvar-id", -] - -[[package]] -name = "solana-msg" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d6a46fcbbaa38193b5b6aeec531395da8dac8dcd183ac6d80d94e6513fc4ad8" -dependencies = [ - "solana-define-syscall", -] - -[[package]] -name = "solana-native-token" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0aa3c5006bbea99b810ad8fc6ae168fc83891b607a13a9aa6be39db71a700f87" - -[[package]] -name = "solana-program" -version = "1.18.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb2b2c8babfae4cace1a25b6efa00418f3acd852cf55d7cecc0360d3c5050479" -dependencies = [ - "ark-bn254", - "ark-ec", - "ark-ff", - "ark-serialize", - "base64 0.21.7", - "bincode", - "bitflags", - "blake3", - "borsh 0.10.4", - "borsh 0.9.3", - "borsh 1.5.3", - "bs58 0.4.0", - "bv", - "bytemuck", - "cc", - "console_error_panic_hook", - "console_log", - "curve25519-dalek 3.2.1", - "getrandom 0.2.15", - "itertools", - "js-sys", - "lazy_static", - "libc", - "libsecp256k1", - "light-poseidon", - "log", - "memoffset", - "num-bigint", - "num-derive", - "num-traits", - "parking_lot", - "rand 0.8.5", - "rustc_version", - "rustversion", - "serde", - "serde_bytes", - "serde_derive", - "serde_json", - "sha2 0.10.8", - "sha3", - "solana-frozen-abi", - "solana-frozen-abi-macro", - "solana-sdk-macro 1.18.22", - "thiserror", - "tiny-bip39", - "wasm-bindgen", - "zeroize", -] - -[[package]] -name = "solana-program" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8afc847b91b77c48113a55f7f492cc7e982e1778c6b954a05feb499f9b8c3cc4" -dependencies = [ - "base64 0.22.1", - "bincode", - "bitflags", - "blake3", - "borsh 0.10.4", - "borsh 1.5.3", - "bs58 0.5.1", - "bv", - "bytemuck", - "bytemuck_derive", - "console_error_panic_hook", - "console_log", - "curve25519-dalek 4.1.3", - "five8_const", - "getrandom 0.2.15", - "js-sys", - "lazy_static", - "log", - "memoffset", - "num-bigint", - "num-derive", - "num-traits", - "parking_lot", - "rand 0.8.5", - "serde", - "serde_bytes", - "serde_derive", - "sha2 0.10.8", - "sha3", - "solana-account-info", - "solana-atomic-u64", - "solana-bincode", - "solana-borsh", - "solana-clock", - "solana-cpi", - "solana-decode-error", - "solana-define-syscall", - "solana-epoch-schedule", - "solana-fee-calculator", - "solana-hash", - "solana-instruction", - "solana-last-restart-slot", - "solana-msg", - "solana-native-token", - "solana-program-entrypoint", - "solana-program-error", - "solana-program-memory", - "solana-program-option", - "solana-program-pack", - "solana-pubkey", - "solana-rent", - "solana-sanitize", - "solana-sdk-macro 2.1.6", - "solana-secp256k1-recover", - "solana-serde-varint", - "solana-serialize-utils", - "solana-sha256-hasher", - "solana-short-vec", - "solana-slot-hashes", - "solana-slot-history", - "solana-stable-layout", - "solana-sysvar-id", - "solana-transaction-error", - 
"thiserror", - "wasm-bindgen", -] - -[[package]] -name = "solana-program-entrypoint" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "988a49fb8231e95861d11b40931f49e06f0dea5a29241acf7cbca644c52abd6b" -dependencies = [ - "solana-account-info", - "solana-msg", - "solana-program-error", - "solana-pubkey", -] - -[[package]] -name = "solana-program-error" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2473773ee9cebf6ba3d7d1fe911938bc2a3a694e85bea33fd7a99d397cde1202" -dependencies = [ - "borsh 1.5.3", - "num-traits", - "serde", - "serde_derive", - "solana-decode-error", - "solana-instruction", - "solana-msg", - "solana-pubkey", -] - -[[package]] -name = "solana-program-memory" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d9e6ca90bbc3020b7b37091c05049f5de48e397545093ec303dc6eff3d4720c" -dependencies = [ - "num-traits", - "solana-define-syscall", -] - -[[package]] -name = "solana-program-option" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c2334c5f9adcc25c6390fbf87ac127adbfbd8943465726e5f389159677ceba2" - -[[package]] -name = "solana-program-pack" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "305475eef9404539cce0c561ab9997b875cc5509f7c553859cd059fdf93b0ab2" -dependencies = [ - "solana-program-error", -] - -[[package]] -name = "solana-pubkey" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a954fba3af498201179981818b0ed61f95c32b4a3db5ea9cc042c971c84cdeae" -dependencies = [ - "borsh 0.10.4", - "borsh 1.5.3", - "bs58 0.5.1", - "bytemuck", - "bytemuck_derive", - "curve25519-dalek 4.1.3", - "five8_const", - "getrandom 0.2.15", - "js-sys", - "num-traits", - "serde", - "serde_derive", - "solana-atomic-u64", - "solana-decode-error", - "solana-define-syscall", - "solana-sanitize", - "solana-sha256-hasher", - "wasm-bindgen", -] - -[[package]] -name = "solana-rent" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ebcc59bff8b3c773214545d038b718a3e2e63c920b8172f85725463029f7f00" -dependencies = [ - "serde", - "serde_derive", - "solana-sdk-macro 2.1.6", - "solana-sysvar-id", -] - -[[package]] -name = "solana-sanitize" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d05ecd7ec442abf0561cbf06984484d6368e71a4882213bfa68b658b0f8d6a0e" - -[[package]] -name = "solana-sdk-macro" -version = "1.18.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c55c196c8050834c391a34b58e3c9fd86b15452ef1feeeafa1dbeb9d2291dfec" -dependencies = [ - "bs58 0.4.0", - "proc-macro2", - "quote", - "rustversion", - "syn 2.0.90", -] - -[[package]] -name = "solana-sdk-macro" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85df4723291cfec8ffe9dadc59d565afcae12ea9a6460b7b28c4da21c2c4a887" -dependencies = [ - "bs58 0.5.1", - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "solana-secp256k1-recover" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41c38fc7bc33f78af99c4848c9a924b2b6e5d33d96f269d108777d982de72f73" -dependencies = [ - "libsecp256k1", - "solana-define-syscall", - "thiserror", -] - -[[package]] -name = "solana-security-txt" -version = "1.1.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "468aa43b7edb1f9b7b7b686d5c3aeb6630dc1708e86e31343499dd5c4d775183" - -[[package]] -name = "solana-serde-varint" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e2fc696c10a2b02356584cbd45d83d42b01b10256cb36b5d0c3768e5adf9283" -dependencies = [ - "serde", -] - -[[package]] -name = "solana-serialize-utils" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b04c6fb71e4cdd10480bc8d306aca6d2a7494e6267e4f103085a89b2ec04e4c" -dependencies = [ - "solana-instruction", - "solana-pubkey", - "solana-sanitize", -] - -[[package]] -name = "solana-sha256-hasher" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb4c977c6c4d95c8b43a9f405844f29a73c9d8b0a7c561b91d4f6a44f290d35c" -dependencies = [ - "sha2 0.10.8", - "solana-define-syscall", - "solana-hash", -] - -[[package]] -name = "solana-short-vec" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ca5799b37642e4e273d7d848564739eab45df670edcc61b4696ef0d5ebe4a8c" -dependencies = [ - "serde", -] - -[[package]] -name = "solana-slot-hashes" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e596d3719b4c03987de87c8cc25b34b6afcf7464c82b4d9c9b114304d882c97" -dependencies = [ - "serde", - "serde_derive", - "solana-hash", - "solana-sysvar-id", -] - -[[package]] -name = "solana-slot-history" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4022b0e8a7f043bd61504fff79176c02b52f69a69299023884b194a1405c0f05" -dependencies = [ - "bv", - "serde", - "serde_derive", - "solana-sysvar-id", -] - -[[package]] -name = "solana-stable-layout" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e82f0665dfcfcb4433708abff54db5ee105fe1bb8db7fd409074bd0275582105" -dependencies = [ - "solana-instruction", - "solana-pubkey", -] - -[[package]] -name = "solana-sysvar-id" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bbed7acdd711e0d620c9b7f788d041d35731c2c675d1d687498745d73826ca4" -dependencies = [ - "solana-pubkey", -] - -[[package]] -name = "solana-transaction-error" -version = "2.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae45f064c8e6006a426b31a1182123ec4daf8cca50bd7aea6e796e6205a7911e" -dependencies = [ - "solana-instruction", - "solana-sanitize", -] - -[[package]] -name = "spl-account-compression" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfcf740e5242f2ad63325e600c368702f32db84608fc8b70d70633c68dd1486d" -dependencies = [ - "anchor-lang", - "bytemuck", - "solana-program 1.18.22", - "solana-security-txt", - "spl-concurrent-merkle-tree 0.3.0", - "spl-noop", -] - -[[package]] -name = "spl-concurrent-merkle-tree" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "141eaea58588beae81b71d101373a53f096737739873de42d6b1368bc2b8fc30" -dependencies = [ - "bytemuck", - "solana-program 1.18.22", - "thiserror", -] - -[[package]] -name = "spl-concurrent-merkle-tree" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7f5f45b971d82cbb0416fdffad3c9098f259545d54072e83a0a482f60f8f689" -dependencies = [ - "bytemuck", - "solana-program 2.1.6", - "thiserror", -] - 
-[[package]] -name = "spl-noop" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dd67ea3d0070a12ff141f5da46f9695f49384a03bce1203a5608f5739437950" -dependencies = [ - "solana-program 1.18.22", -] - -[[package]] -name = "subtle" -version = "2.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" - -[[package]] -name = "syn" -version = "1.0.109" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "syn" -version = "2.0.90" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "thiserror" -version = "1.0.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = "thiserror-impl" -version = "1.0.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "tiny-bip39" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffc59cb9dfc85bb312c3a78fd6aa8a8582e310b0fa885d5bb877f6dcc601839d" -dependencies = [ - "anyhow", - "hmac", - "once_cell", - "pbkdf2", - "rand 0.7.3", - "rustc-hash", - "sha2 0.9.9", - "thiserror", - "unicode-normalization", - "wasm-bindgen", - "zeroize", -] - -[[package]] -name = "tinyvec" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "022db8904dfa342efe721985167e9fcd16c29b226db4397ed752a761cfce81e8" -dependencies = [ - "tinyvec_macros", -] - -[[package]] -name = "tinyvec_macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" - -[[package]] -name = "tokio" -version = "1.42.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cec9b21b0450273377fc97bd4c33a8acffc8c996c987a7c5b319a0083707551" -dependencies = [ - "backtrace", - "pin-project-lite", - "tokio-macros", -] - -[[package]] -name = "tokio-macros" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "toml" -version = "0.5.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" -dependencies = [ - "serde", -] - -[[package]] -name = "toml_datetime" -version = "0.6.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" - -[[package]] -name = "toml_edit" -version = "0.22.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" -dependencies = [ - "indexmap", - "toml_datetime", - "winnow", -] - 
-[[package]] -name = "typenum" -version = "1.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" - -[[package]] -name = "unicode-ident" -version = "1.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" - -[[package]] -name = "unicode-normalization" -version = "0.1.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" -dependencies = [ - "tinyvec", -] - -[[package]] -name = "unicode-segmentation" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" - -[[package]] -name = "version_check" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" - -[[package]] -name = "wasi" -version = "0.9.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" - -[[package]] -name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" - -[[package]] -name = "wasm-bindgen" -version = "0.2.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a474f6281d1d70c17ae7aa6a613c87fce69a127e2624002df63dcb39d6cf6396" -dependencies = [ - "cfg-if", - "once_cell", - "wasm-bindgen-macro", -] - -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f89bb38646b4f81674e8f5c3fb81b562be1fd936d84320f3264486418519c79" -dependencies = [ - "bumpalo", - "log", - "proc-macro2", - "quote", - "syn 2.0.90", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2cc6181fd9a7492eef6fef1f33961e3695e4579b9872a6f7c83aee556666d4fe" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", - "wasm-bindgen-backend", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "943aab3fdaaa029a6e0271b35ea10b72b943135afe9bffca82384098ad0e06a6" - -[[package]] -name = "web-sys" -version = "0.3.76" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04dd7223427d52553d3702c004d3b2fe07c148165faa56313cb00211e31c12bc" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "windows-targets" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" -dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_gnullvm", - "windows_i686_msvc", - "windows_x86_64_gnu", - 
"windows_x86_64_gnullvm", - "windows_x86_64_msvc", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" - -[[package]] -name = "windows_i686_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" - -[[package]] -name = "windows_i686_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" - -[[package]] -name = "winnow" -version = "0.6.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b" -dependencies = [ - "memchr", -] - -[[package]] -name = "zerocopy" -version = "0.7.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" -dependencies = [ - "byteorder", - "zerocopy-derive", -] - -[[package]] -name = "zerocopy-derive" -version = "0.7.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "zeroize" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4756f7db3f7b5574938c3eb1c117038b8e07f95ee6718c0efad4ac21508f1efd" -dependencies = [ - "zeroize_derive", -] - -[[package]] -name = "zeroize_derive" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] diff --git a/concurrent/Cargo.toml b/concurrent/Cargo.toml deleted file mode 100644 index d7a4195..0000000 --- a/concurrent/Cargo.toml +++ /dev/null @@ -1,38 +0,0 @@ -[package] -name = "light-concurrent-merkle-tree" -version = "1.1.0" -edition = "2021" -description = "Concurrent Merkle tree implementation" -repository = "https://github.com/Lightprotocol/light-protocol" -license = "Apache-2.0" - -[features] -heavy-tests = [] -solana = [ - "light-bounded-vec/solana", - "light-hasher/solana", - "solana-program" -] - -[dependencies] -borsh = "0.10" 
-bytemuck = { version = "1.17", features = ["derive"] } -light-bounded-vec = { version = "1.1.0" } -light-hasher = { version = "1.1.0" } -light-utils = { version = "1.1.0" } -memoffset = "0.9" -solana-program = { version="=1.18.22", optional = true } -thiserror = "1.0" - -[dev-dependencies] -ark-bn254 = "0.4" -ark-ff = "0.4" -light-merkle-tree-reference = { version = "1.1.0" } -light-hash-set = { version = "1.1.0", features = ["solana"] } -rand = "0.8" -solana-program = { version="=1.18.22" } -spl-account-compression = { version = "0.3.0", default-features = false} -spl-concurrent-merkle-tree = { version = "0.2.0", default-features = false} -tokio = { version = "1.39.1", features = ["rt", "macros", "rt-multi-thread"] } -num-bigint = "0.4" -num-traits = "0.2" diff --git a/concurrent/src/changelog.rs b/concurrent/src/changelog.rs deleted file mode 100644 index c0c74e4..0000000 --- a/concurrent/src/changelog.rs +++ /dev/null @@ -1,140 +0,0 @@ -use std::ops::{Deref, DerefMut}; - -use light_bounded_vec::BoundedVec; - -use crate::errors::ConcurrentMerkleTreeError; - -#[derive(Clone, Debug, PartialEq, Eq)] -#[repr(transparent)] -pub struct ChangelogPath(pub [Option<[u8; 32]>; HEIGHT]); - -impl ChangelogPath { - pub fn from_fn(cb: F) -> Self - where - F: FnMut(usize) -> Option<[u8; 32]>, - { - Self(std::array::from_fn(cb)) - } - - /// Checks whether the path is equal to the provided [`BoundedVec`]. - /// - /// [`ChangelogPath`] might contain `None` nodes at the end, which - /// mean that it does not define them, but the following changelog - /// paths are expected to overwrite them. - /// - /// Therefore, the comparison ends on the first encountered first - /// `None`. If all `Some` nodes are equal to the corresponding ones - /// in the provided vector, the result is `true`. - pub fn eq_to(&self, other: BoundedVec<[u8; 32]>) -> bool { - if other.len() != HEIGHT { - return false; - } - - for i in 0..HEIGHT { - let changelog_node = self.0[i]; - let path_node = other[i]; - match changelog_node { - Some(changelog_node) => { - if changelog_node != path_node { - return false; - } - } - None => break, - } - } - - true - } -} - -impl Default for ChangelogPath { - fn default() -> Self { - Self([None; HEIGHT]) - } -} - -impl Deref for ChangelogPath { - type Target = [Option<[u8; 32]>; HEIGHT]; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl DerefMut for ChangelogPath { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} - -#[derive(Clone, Debug, PartialEq, Eq)] -#[repr(C)] -pub struct ChangelogEntry { - // Path of the changelog. - pub path: ChangelogPath, - // Index of the affected leaf. - pub index: u64, -} - -pub type ChangelogEntry22 = ChangelogEntry<22>; -pub type ChangelogEntry26 = ChangelogEntry<26>; -pub type ChangelogEntry32 = ChangelogEntry<32>; -pub type ChangelogEntry40 = ChangelogEntry<40>; - -impl ChangelogEntry { - pub fn new(path: ChangelogPath, index: usize) -> Self { - let index = index as u64; - Self { path, index } - } - - pub fn default_with_index(index: usize) -> Self { - Self { - path: ChangelogPath::default(), - index: index as u64, - } - } - - pub fn index(&self) -> usize { - self.index as usize - } - - /// Returns an intersection index in the changelog entry which affects the - /// provided path. - /// - /// Determining it can be done by taking a XOR of the leaf index (which was - /// directly updated in the changelog entry) and the leaf index we are - /// trying to update. 
- /// - /// The number of bytes in the binary representations of the indexes is - /// determined by the height of the tree. For example, for the tree with - /// height 4, update attempt of leaf under index 2 and changelog affecting - /// index 4, critbit would be: - /// - /// 2 ^ 4 = 0b_0010 ^ 0b_0100 = 0b_0110 = 6 - fn intersection_index(&self, leaf_index: usize) -> usize { - let padding = 64 - HEIGHT; - let common_path_len = ((leaf_index ^ self.index()) << padding).leading_zeros() as usize; - (HEIGHT - 1) - common_path_len - } - - pub fn update_proof( - &self, - leaf_index: usize, - proof: &mut BoundedVec<[u8; 32]>, - ) -> Result<(), ConcurrentMerkleTreeError> { - if leaf_index != self.index() { - let intersection_index = self.intersection_index(leaf_index); - if let Some(node) = self.path[intersection_index] { - proof[intersection_index] = node; - } - } else { - // This case means that the leaf we are trying to update was - // already updated. Therefore, the right thing to do is to notify - // the caller to sync the local Merkle tree and update the leaf, - // if necessary. - return Err(ConcurrentMerkleTreeError::CannotUpdateLeaf); - } - - Ok(()) - } -} diff --git a/concurrent/src/copy.rs b/concurrent/src/copy.rs deleted file mode 100644 index 068be11..0000000 --- a/concurrent/src/copy.rs +++ /dev/null @@ -1,231 +0,0 @@ -use std::ops::Deref; - -use crate::{errors::ConcurrentMerkleTreeError, ConcurrentMerkleTree}; -use light_bounded_vec::{BoundedVecMetadata, CyclicBoundedVecMetadata}; -use light_hasher::Hasher; -use light_utils::offset::copy::{read_bounded_vec_at, read_cyclic_bounded_vec_at, read_value_at}; -use memoffset::{offset_of, span_of}; - -#[derive(Debug)] -pub struct ConcurrentMerkleTreeCopy(ConcurrentMerkleTree) -where - H: Hasher; - -impl ConcurrentMerkleTreeCopy -where - H: Hasher, -{ - pub fn struct_from_bytes_copy( - bytes: &[u8], - ) -> Result<(ConcurrentMerkleTree, usize), ConcurrentMerkleTreeError> { - let expected_size = ConcurrentMerkleTree::::non_dyn_fields_size(); - if bytes.len() < expected_size { - return Err(ConcurrentMerkleTreeError::BufferSize( - expected_size, - bytes.len(), - )); - } - - let height = usize::from_le_bytes( - bytes[span_of!(ConcurrentMerkleTree, height)] - .try_into() - .unwrap(), - ); - let canopy_depth = usize::from_le_bytes( - bytes[span_of!(ConcurrentMerkleTree, canopy_depth)] - .try_into() - .unwrap(), - ); - - let mut offset = offset_of!(ConcurrentMerkleTree, next_index); - - let next_index = unsafe { read_value_at(bytes, &mut offset) }; - let sequence_number = unsafe { read_value_at(bytes, &mut offset) }; - let rightmost_leaf = unsafe { read_value_at(bytes, &mut offset) }; - let filled_subtrees_metadata: BoundedVecMetadata = - unsafe { read_value_at(bytes, &mut offset) }; - let changelog_metadata: CyclicBoundedVecMetadata = - unsafe { read_value_at(bytes, &mut offset) }; - let roots_metadata: CyclicBoundedVecMetadata = unsafe { read_value_at(bytes, &mut offset) }; - let canopy_metadata: BoundedVecMetadata = unsafe { read_value_at(bytes, &mut offset) }; - - let expected_size = ConcurrentMerkleTree::::size_in_account( - height, - changelog_metadata.capacity(), - roots_metadata.capacity(), - canopy_depth, - ); - if bytes.len() < expected_size { - return Err(ConcurrentMerkleTreeError::BufferSize( - expected_size, - bytes.len(), - )); - } - - let filled_subtrees = - unsafe { read_bounded_vec_at(bytes, &mut offset, &filled_subtrees_metadata) }; - let changelog = - unsafe { read_cyclic_bounded_vec_at(bytes, &mut offset, &changelog_metadata) }; - 
let roots = unsafe { read_cyclic_bounded_vec_at(bytes, &mut offset, &roots_metadata) }; - let canopy = unsafe { read_bounded_vec_at(bytes, &mut offset, &canopy_metadata) }; - - let mut merkle_tree = ConcurrentMerkleTree::new( - height, - changelog_metadata.capacity(), - roots_metadata.capacity(), - canopy_depth, - )?; - // SAFETY: Tree is initialized. - unsafe { - *merkle_tree.next_index = next_index; - *merkle_tree.sequence_number = sequence_number; - *merkle_tree.rightmost_leaf = rightmost_leaf; - } - merkle_tree.filled_subtrees = filled_subtrees; - merkle_tree.changelog = changelog; - merkle_tree.roots = roots; - merkle_tree.canopy = canopy; - - Ok((merkle_tree, offset)) - } - - pub fn from_bytes_copy(bytes: &[u8]) -> Result { - let (merkle_tree, _) = Self::struct_from_bytes_copy(bytes)?; - merkle_tree.check_size_constraints()?; - Ok(Self(merkle_tree)) - } -} - -impl Deref for ConcurrentMerkleTreeCopy -where - H: Hasher, -{ - type Target = ConcurrentMerkleTree; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -#[cfg(test)] -mod test { - use crate::zero_copy::ConcurrentMerkleTreeZeroCopyMut; - - use super::*; - - use ark_bn254::Fr; - use ark_ff::{BigInteger, PrimeField, UniformRand}; - use light_hasher::Poseidon; - use rand::{thread_rng, Rng}; - - fn from_bytes_copy< - const HEIGHT: usize, - const CHANGELOG: usize, - const ROOTS: usize, - const CANOPY_DEPTH: usize, - const OPERATIONS: usize, - >() { - let mut mt_1 = - ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY_DEPTH) - .unwrap(); - mt_1.init().unwrap(); - - // Create a buffer with random bytes - the `*_init` method should - // initialize the buffer gracefully and the randomness shouldn't cause - // undefined behavior. - let mut bytes = vec![ - 0u8; - ConcurrentMerkleTree::::size_in_account( - HEIGHT, - CHANGELOG, - ROOTS, - CANOPY_DEPTH - ) - ]; - thread_rng().fill(bytes.as_mut_slice()); - - // Initialize a Merkle tree on top of a byte slice. - { - let mut mt = - ConcurrentMerkleTreeZeroCopyMut::::from_bytes_zero_copy_init( - bytes.as_mut_slice(), - HEIGHT, - CANOPY_DEPTH, - CHANGELOG, - ROOTS, - ) - .unwrap(); - mt.init().unwrap(); - - // Ensure that it was properly initialized. - assert_eq!(mt.height, HEIGHT); - assert_eq!(mt.canopy_depth, CANOPY_DEPTH); - assert_eq!(mt.next_index(), 0); - assert_eq!(mt.sequence_number(), 0); - assert_eq!(mt.rightmost_leaf(), Poseidon::zero_bytes()[0]); - - assert_eq!(mt.filled_subtrees.capacity(), HEIGHT); - assert_eq!(mt.filled_subtrees.len(), HEIGHT); - - assert_eq!(mt.changelog.capacity(), CHANGELOG); - assert_eq!(mt.changelog.len(), 1); - - assert_eq!(mt.roots.capacity(), ROOTS); - assert_eq!(mt.roots.len(), 1); - - assert_eq!( - mt.canopy.capacity(), - ConcurrentMerkleTree::::canopy_size(CANOPY_DEPTH) - ); - - assert_eq!(mt.root(), Poseidon::zero_bytes()[HEIGHT]); - } - - let mut rng = thread_rng(); - - for _ in 0..OPERATIONS { - // Reload the tree from bytes on each iteration. - let mut mt_2 = - ConcurrentMerkleTreeZeroCopyMut::::from_bytes_zero_copy_mut( - &mut bytes, - ) - .unwrap(); - - let leaf: [u8; 32] = Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - - mt_1.append(&leaf).unwrap(); - mt_2.append(&leaf).unwrap(); - - assert_eq!(mt_1, *mt_2); - } - - // Read a copy of that Merkle tree. 
- let mt_2 = ConcurrentMerkleTreeCopy::::from_bytes_copy(&bytes).unwrap(); - - assert_eq!(mt_1.height, mt_2.height); - assert_eq!(mt_1.canopy_depth, mt_2.canopy_depth); - assert_eq!(mt_1.next_index(), mt_2.next_index()); - assert_eq!(mt_1.sequence_number(), mt_2.sequence_number()); - assert_eq!(mt_1.rightmost_leaf(), mt_2.rightmost_leaf()); - assert_eq!( - mt_1.filled_subtrees.as_slice(), - mt_2.filled_subtrees.as_slice() - ); - } - - #[test] - fn test_from_bytes_copy_26_1400_2400_10_256_1024() { - const HEIGHT: usize = 26; - const CHANGELOG_SIZE: usize = 1400; - const ROOTS: usize = 2400; - const CANOPY_DEPTH: usize = 10; - - const OPERATIONS: usize = 1024; - - from_bytes_copy::() - } -} diff --git a/concurrent/src/errors.rs b/concurrent/src/errors.rs deleted file mode 100644 index 9d36870..0000000 --- a/concurrent/src/errors.rs +++ /dev/null @@ -1,72 +0,0 @@ -use light_bounded_vec::BoundedVecError; -use light_hasher::errors::HasherError; -use thiserror::Error; - -#[derive(Debug, Error)] -pub enum ConcurrentMerkleTreeError { - #[error("Integer overflow")] - IntegerOverflow, - #[error("Invalid height, it has to be greater than 0")] - HeightZero, - #[error("Invalud height, expected {0}")] - InvalidHeight(usize), - #[error("Invalid changelog size, it has to be greater than 0. Changelog is used for storing Merkle paths during appends.")] - ChangelogZero, - #[error("Invalid number of roots, it has to be greater than 0")] - RootsZero, - #[error("Canopy depth has to be lower than height")] - CanopyGeThanHeight, - #[error("Merkle tree is full, cannot append more leaves.")] - TreeFull, - #[error("Number of leaves ({0}) exceeds the changelog capacity ({1}).")] - BatchGreaterThanChangelog(usize, usize), - #[error("Invalid proof length, expected {0}, got {1}.")] - InvalidProofLength(usize, usize), - #[error("Invalid Merkle proof, expected root: {0:?}, the provided proof produces root: {1:?}")] - InvalidProof([u8; 32], [u8; 32]), - #[error("Attempting to update the leaf which was updated by an another newest change.")] - CannotUpdateLeaf, - #[error("Cannot update the empty leaf")] - CannotUpdateEmpty, - #[error("The batch of leaves is empty")] - EmptyLeaves, - #[error("Invalid buffer size, expected {0}, got {1}")] - BufferSize(usize, usize), - #[error("Hasher error: {0}")] - Hasher(#[from] HasherError), - #[error("Bounded vector error: {0}")] - BoundedVec(#[from] BoundedVecError), -} - -// NOTE(vadorovsky): Unfortunately, we need to do it by hand. `num_derive::ToPrimitive` -// doesn't support data-carrying enums. 
-#[cfg(feature = "solana")] -impl From for u32 { - fn from(e: ConcurrentMerkleTreeError) -> u32 { - match e { - ConcurrentMerkleTreeError::IntegerOverflow => 10001, - ConcurrentMerkleTreeError::HeightZero => 10002, - ConcurrentMerkleTreeError::InvalidHeight(_) => 10003, - ConcurrentMerkleTreeError::ChangelogZero => 10004, - ConcurrentMerkleTreeError::RootsZero => 10005, - ConcurrentMerkleTreeError::CanopyGeThanHeight => 10006, - ConcurrentMerkleTreeError::TreeFull => 10007, - ConcurrentMerkleTreeError::BatchGreaterThanChangelog(_, _) => 10008, - ConcurrentMerkleTreeError::InvalidProofLength(_, _) => 10009, - ConcurrentMerkleTreeError::InvalidProof(_, _) => 10010, - ConcurrentMerkleTreeError::CannotUpdateLeaf => 10011, - ConcurrentMerkleTreeError::CannotUpdateEmpty => 10012, - ConcurrentMerkleTreeError::EmptyLeaves => 10013, - ConcurrentMerkleTreeError::BufferSize(_, _) => 10014, - ConcurrentMerkleTreeError::Hasher(e) => e.into(), - ConcurrentMerkleTreeError::BoundedVec(e) => e.into(), - } - } -} - -#[cfg(feature = "solana")] -impl From for solana_program::program_error::ProgramError { - fn from(e: ConcurrentMerkleTreeError) -> Self { - solana_program::program_error::ProgramError::Custom(e.into()) - } -} diff --git a/concurrent/src/event.rs b/concurrent/src/event.rs deleted file mode 100644 index 066b72d..0000000 --- a/concurrent/src/event.rs +++ /dev/null @@ -1,87 +0,0 @@ -use borsh::{BorshDeserialize, BorshSerialize}; -#[derive(BorshDeserialize, BorshSerialize, Debug)] -pub struct MerkleTreeEvents { - pub events: Vec, -} - -/// Event containing the Merkle path of the given -/// [`StateMerkleTree`](light_merkle_tree_program::state::StateMerkleTree) -/// change. Indexers can use this type of events to re-build a non-sparse -/// version of state Merkle tree. -#[derive(BorshDeserialize, BorshSerialize, Debug, PartialEq)] -#[repr(C)] -pub enum MerkleTreeEvent { - V1(ChangelogEvent), - V2(NullifierEvent), - V3(IndexedMerkleTreeEvent), -} - -/// Node of the Merkle path with an index representing the position in a -/// non-sparse Merkle tree. -#[derive(BorshDeserialize, BorshSerialize, Debug, Eq, PartialEq)] -pub struct PathNode { - pub node: [u8; 32], - pub index: u32, -} - -/// Version 1 of the [`ChangelogEvent`](light_merkle_tree_program::state::ChangelogEvent). -#[derive(BorshDeserialize, BorshSerialize, Debug, PartialEq)] -pub struct ChangelogEvent { - /// Public key of the tree. - pub id: [u8; 32], - // Merkle paths. - pub paths: Vec>, - /// Number of successful operations on the on-chain tree. - pub seq: u64, - /// Changelog event index. - pub index: u32, -} - -#[derive(BorshDeserialize, BorshSerialize, Debug, PartialEq)] -pub struct NullifierEvent { - /// Public key of the tree. - pub id: [u8; 32], - /// Indices of leaves that were nullified. - /// Nullified means updated with [0u8;32]. - pub nullified_leaves_indices: Vec, - /// Number of successful operations on the on-chain tree. - /// seq corresponds to leaves[0]. - /// seq + 1 corresponds to leaves[1]. - pub seq: u64, -} - -#[derive(Debug, Default, Clone, Copy, BorshSerialize, BorshDeserialize, Eq, PartialEq)] -pub struct RawIndexedElement -where - I: Clone, -{ - pub value: [u8; 32], - pub next_index: I, - pub next_value: [u8; 32], - pub index: I, -} - -#[derive(BorshDeserialize, BorshSerialize, Debug, Clone, PartialEq)] -pub struct IndexedMerkleTreeUpdate -where - I: Clone, -{ - pub new_low_element: RawIndexedElement, - /// Leaf hash in new_low_element.index. 
- pub new_low_element_hash: [u8; 32], - pub new_high_element: RawIndexedElement, - /// Leaf hash in new_high_element.index, - /// is equivalent with next_index. - pub new_high_element_hash: [u8; 32], -} - -#[derive(BorshDeserialize, BorshSerialize, Debug, PartialEq)] -pub struct IndexedMerkleTreeEvent { - /// Public key of the tree. - pub id: [u8; 32], - pub updates: Vec>, - /// Number of successful operations on the on-chain tree. - /// seq corresponds to leaves[0]. - /// seq + 1 corresponds to leaves[1]. - pub seq: u64, -} diff --git a/concurrent/src/hash.rs b/concurrent/src/hash.rs deleted file mode 100644 index fe15776..0000000 --- a/concurrent/src/hash.rs +++ /dev/null @@ -1,41 +0,0 @@ -use light_bounded_vec::BoundedVec; -use light_hasher::Hasher; - -use crate::errors::ConcurrentMerkleTreeError; - -/// Returns the hash of the parent node based on the provided `node` (with its -/// `node_index`) and `sibling` (with its `sibling_index`). -pub fn compute_parent_node( - node: &[u8; 32], - sibling: &[u8; 32], - node_index: usize, - level: usize, -) -> Result<[u8; 32], ConcurrentMerkleTreeError> -where - H: Hasher, -{ - let is_left = (node_index >> level) & 1 == 0; - let hash = if is_left { - H::hashv(&[node, sibling])? - } else { - H::hashv(&[sibling, node])? - }; - Ok(hash) -} - -/// Computes the root for the given `leaf` (with index `i`) and `proof`. It -/// doesn't perform the validation of the provided `proof`. -pub fn compute_root( - leaf: &[u8; 32], - leaf_index: usize, - proof: &BoundedVec<[u8; 32]>, -) -> Result<[u8; 32], ConcurrentMerkleTreeError> -where - H: Hasher, -{ - let mut node = *leaf; - for (level, sibling) in proof.iter().enumerate() { - node = compute_parent_node::(&node, sibling, leaf_index, level)?; - } - Ok(node) -} diff --git a/concurrent/src/lib.rs b/concurrent/src/lib.rs deleted file mode 100644 index 6f9c1c6..0000000 --- a/concurrent/src/lib.rs +++ /dev/null @@ -1,702 +0,0 @@ -use std::{ - alloc::{self, handle_alloc_error, Layout}, - iter::Skip, - marker::PhantomData, - mem, -}; - -use changelog::ChangelogPath; -use light_bounded_vec::{ - BoundedVec, BoundedVecMetadata, CyclicBoundedVec, CyclicBoundedVecIterator, - CyclicBoundedVecMetadata, -}; -pub use light_hasher; -use light_hasher::Hasher; - -pub mod changelog; -pub mod copy; -pub mod errors; -pub mod event; -pub mod hash; -pub mod zero_copy; - -use crate::{ - changelog::ChangelogEntry, - errors::ConcurrentMerkleTreeError, - hash::{compute_parent_node, compute_root}, -}; - -/// [Concurrent Merkle tree](https://drive.google.com/file/d/1BOpa5OFmara50fTvL0VIVYjtg-qzHCVc/view) -/// which allows for multiple requests of updating leaves, without making any -/// of the requests invalid, as long as they are not modyfing the same leaf. -/// -/// When any of the above happens, some of the concurrent requests are going to -/// be invalid, forcing the clients to re-generate the Merkle proof. But that's -/// still better than having such a failure after any update happening in the -/// middle of requesting the update. -/// -/// Due to ability to make a decent number of concurrent update requests to be -/// valid, no lock is necessary. -#[repr(C)] -#[derive(Debug)] -// TODO(vadorovsky): The only reason why are we still keeping `HEIGHT` as a -// const generic here is that removing it would require keeping a `BoundecVec` -// inside `CyclicBoundedVec`. Casting byte slices to such nested vector is not -// a trivial task, but we might eventually do it at some point. 
-pub struct ConcurrentMerkleTree -where - H: Hasher, -{ - pub height: usize, - pub canopy_depth: usize, - - pub next_index: *mut usize, - pub sequence_number: *mut usize, - pub rightmost_leaf: *mut [u8; 32], - - /// Hashes of subtrees. - pub filled_subtrees: BoundedVec<[u8; 32]>, - /// History of Merkle proofs. - pub changelog: CyclicBoundedVec>, - /// History of roots. - pub roots: CyclicBoundedVec<[u8; 32]>, - /// Cached upper nodes. - pub canopy: BoundedVec<[u8; 32]>, - - pub _hasher: PhantomData, -} - -pub type ConcurrentMerkleTree26 = ConcurrentMerkleTree; - -impl ConcurrentMerkleTree -where - H: Hasher, -{ - /// Number of nodes to include in canopy, based on `canopy_depth`. - #[inline(always)] - pub fn canopy_size(canopy_depth: usize) -> usize { - (1 << (canopy_depth + 1)) - 2 - } - - /// Size of the struct **without** dynamically sized fields (`BoundedVec`, - /// `CyclicBoundedVec`). - pub fn non_dyn_fields_size() -> usize { - // height - mem::size_of::() - // changelog_capacity - + mem::size_of::() - // next_index - + mem::size_of::() - // sequence_number - + mem::size_of::() - // rightmost_leaf - + mem::size_of::<[u8; 32]>() - // filled_subtrees (metadata) - + mem::size_of::() - // changelog (metadata) - + mem::size_of::() - // roots (metadata) - + mem::size_of::() - // canopy (metadata) - + mem::size_of::() - } - - // TODO(vadorovsky): Make a macro for that. - pub fn size_in_account( - height: usize, - changelog_size: usize, - roots_size: usize, - canopy_depth: usize, - ) -> usize { - // non-dynamic fields - Self::non_dyn_fields_size() - // filled_subtrees - + mem::size_of::<[u8; 32]>() * height - // changelog - + mem::size_of::>() * changelog_size - // roots - + mem::size_of::<[u8; 32]>() * roots_size - // canopy - + mem::size_of::<[u8; 32]>() * Self::canopy_size(canopy_depth) - } - - fn check_size_constraints_new( - height: usize, - changelog_size: usize, - roots_size: usize, - canopy_depth: usize, - ) -> Result<(), ConcurrentMerkleTreeError> { - if height == 0 || HEIGHT == 0 { - return Err(ConcurrentMerkleTreeError::HeightZero); - } - if height != HEIGHT { - return Err(ConcurrentMerkleTreeError::InvalidHeight(HEIGHT)); - } - if canopy_depth > height { - return Err(ConcurrentMerkleTreeError::CanopyGeThanHeight); - } - // Changelog needs to be at least 1, because it's used for storing - // Merkle paths in `append`/`append_batch`. 
- if changelog_size == 0 { - return Err(ConcurrentMerkleTreeError::ChangelogZero); - } - if roots_size == 0 { - return Err(ConcurrentMerkleTreeError::RootsZero); - } - Ok(()) - } - - fn check_size_constraints(&self) -> Result<(), ConcurrentMerkleTreeError> { - Self::check_size_constraints_new( - self.height, - self.changelog.capacity(), - self.roots.capacity(), - self.canopy_depth, - ) - } - - pub fn new( - height: usize, - changelog_size: usize, - roots_size: usize, - canopy_depth: usize, - ) -> Result { - Self::check_size_constraints_new(height, changelog_size, roots_size, canopy_depth)?; - - let layout = Layout::new::(); - let next_index = unsafe { alloc::alloc(layout) as *mut usize }; - if next_index.is_null() { - handle_alloc_error(layout); - } - unsafe { *next_index = 0 }; - - let layout = Layout::new::(); - let sequence_number = unsafe { alloc::alloc(layout) as *mut usize }; - if sequence_number.is_null() { - handle_alloc_error(layout); - } - unsafe { *sequence_number = 0 }; - - let layout = Layout::new::<[u8; 32]>(); - let rightmost_leaf = unsafe { alloc::alloc(layout) as *mut [u8; 32] }; - if rightmost_leaf.is_null() { - handle_alloc_error(layout); - } - unsafe { *rightmost_leaf = [0u8; 32] }; - - Ok(Self { - height, - canopy_depth, - - next_index, - sequence_number, - rightmost_leaf, - - filled_subtrees: BoundedVec::with_capacity(height), - changelog: CyclicBoundedVec::with_capacity(changelog_size), - roots: CyclicBoundedVec::with_capacity(roots_size), - canopy: BoundedVec::with_capacity(Self::canopy_size(canopy_depth)), - - _hasher: PhantomData, - }) - } - - /// Initializes the Merkle tree. - pub fn init(&mut self) -> Result<(), ConcurrentMerkleTreeError> { - self.check_size_constraints()?; - - // Initialize root. - let root = H::zero_bytes()[self.height]; - self.roots.push(root); - - // Initialize changelog. - let path = ChangelogPath::from_fn(|i| Some(H::zero_bytes()[i])); - let changelog_entry = ChangelogEntry { path, index: 0 }; - self.changelog.push(changelog_entry); - - // Initialize filled subtrees. - for i in 0..self.height { - self.filled_subtrees.push(H::zero_bytes()[i]).unwrap(); - } - - // Initialize canopy. - for level_i in 0..self.canopy_depth { - let level_nodes = 1 << (level_i + 1); - for _ in 0..level_nodes { - let node = H::zero_bytes()[self.height - level_i - 1]; - self.canopy.push(node)?; - } - } - - Ok(()) - } - - /// Returns the index of the current changelog entry. - pub fn changelog_index(&self) -> usize { - self.changelog.last_index() - } - - /// Returns the index of the current root in the tree's root buffer. - pub fn root_index(&self) -> usize { - self.roots.last_index() - } - - /// Returns the current root. - pub fn root(&self) -> [u8; 32] { - // PANICS: This should never happen - there is always a root in the - // tree and `self.root_index()` should always point to an existing index. 
- self.roots[self.root_index()] - } - - pub fn current_index(&self) -> usize { - let next_index = self.next_index(); - if next_index > 0 { - next_index - 1 - } else { - next_index - } - } - - pub fn next_index(&self) -> usize { - unsafe { *self.next_index } - } - - fn inc_next_index(&mut self) -> Result<(), ConcurrentMerkleTreeError> { - unsafe { - *self.next_index = self - .next_index() - .checked_add(1) - .ok_or(ConcurrentMerkleTreeError::IntegerOverflow)?; - } - Ok(()) - } - - pub fn sequence_number(&self) -> usize { - unsafe { *self.sequence_number } - } - - fn inc_sequence_number(&mut self) -> Result<(), ConcurrentMerkleTreeError> { - unsafe { - *self.sequence_number = self - .sequence_number() - .checked_add(1) - .ok_or(ConcurrentMerkleTreeError::IntegerOverflow)?; - } - Ok(()) - } - - pub fn rightmost_leaf(&self) -> [u8; 32] { - unsafe { *self.rightmost_leaf } - } - - fn set_rightmost_leaf(&mut self, leaf: &[u8; 32]) { - unsafe { *self.rightmost_leaf = *leaf }; - } - - pub fn update_proof_from_canopy( - &self, - leaf_index: usize, - proof: &mut BoundedVec<[u8; 32]>, - ) -> Result<(), ConcurrentMerkleTreeError> { - let mut node_index = ((1 << self.height) + leaf_index) >> (self.height - self.canopy_depth); - while node_index > 1 { - // `node_index - 2` maps to the canopy index. - let canopy_index = node_index - 2; - let canopy_index = if canopy_index % 2 == 0 { - canopy_index + 1 - } else { - canopy_index - 1 - }; - proof.push(self.canopy[canopy_index])?; - node_index >>= 1; - } - - Ok(()) - } - - /// Returns an iterator with changelog entries newer than the requested - /// `changelog_index`. - pub fn changelog_entries( - &self, - changelog_index: usize, - ) -> Result>>, ConcurrentMerkleTreeError> - { - // `CyclicBoundedVec::iter_from` returns an iterator which includes also - // the element indicated by the provided index. - // - // However, we want to iterate only on changelog events **newer** than - // the provided one. - // - // Calling `iter_from(changelog_index + 1)` wouldn't work. If - // `changelog_index` points to the newest changelog entry, - // `changelog_index + 1` would point to the **oldest** changelog entry. - // That would result in iterating over the whole changelog - from the - // oldest to the newest element. - Ok(self.changelog.iter_from(changelog_index)?.skip(1)) - } - - /// Updates the given Merkle proof. - /// - /// The update is performed by checking whether there are any new changelog - /// entries and whether they contain changes which affect the current - /// proof. To be precise, for each changelog entry, it's done in the - /// following steps: - /// - /// * Check if the changelog entry was directly updating the `leaf_index` - /// we are trying to update. - /// * If no (we check that condition first, since it's more likely), - /// it means that there is a change affecting the proof, but not the - /// leaf. - /// Check which element from our proof was affected by the change - /// (using the `critbit_index` method) and update it (copy the new - /// element from the changelog to our updated proof). - /// * If yes, it means that the same leaf we want to update was already - /// updated. In such case, updating the proof is not possible. - pub fn update_proof_from_changelog( - &self, - changelog_index: usize, - leaf_index: usize, - proof: &mut BoundedVec<[u8; 32]>, - ) -> Result<(), ConcurrentMerkleTreeError> { - // Iterate over changelog entries starting from the requested - // `changelog_index`. 
- // - // Since we are interested only in subsequent, new changelog entries, - // skip the first result. - for changelog_entry in self.changelog_entries(changelog_index)? { - changelog_entry.update_proof(leaf_index, proof)?; - } - - Ok(()) - } - - /// Checks whether the given Merkle `proof` for the given `node` (with index - /// `i`) is valid. The proof is valid when computing parent node hashes using - /// the whole path of the proof gives the same result as the given `root`. - pub fn validate_proof( - &self, - leaf: &[u8; 32], - leaf_index: usize, - proof: &BoundedVec<[u8; 32]>, - ) -> Result<(), ConcurrentMerkleTreeError> { - let expected_root = self.root(); - let computed_root = compute_root::(leaf, leaf_index, proof)?; - if computed_root == expected_root { - Ok(()) - } else { - Err(ConcurrentMerkleTreeError::InvalidProof( - expected_root, - computed_root, - )) - } - } - - /// Updates the leaf under `leaf_index` with the `new_leaf` value. - /// - /// 1. Computes the new path and root from `new_leaf` and Merkle proof - /// (`proof`). - /// 2. Stores the new path as the latest changelog entry and increments the - /// latest changelog index. - /// 3. Stores the latest root and increments the latest root index. - /// 4. If new leaf is at the rightmost index, stores it as the new - /// rightmost leaft and stores the Merkle proof as the new rightmost - /// proof. - /// - /// # Validation - /// - /// This method doesn't validate the proof. Caller is responsible for - /// doing that before. - fn update_leaf_in_tree( - &mut self, - new_leaf: &[u8; 32], - leaf_index: usize, - proof: &BoundedVec<[u8; 32]>, - ) -> Result<(usize, usize), ConcurrentMerkleTreeError> { - let mut changelog_entry = ChangelogEntry::default_with_index(leaf_index); - let mut current_node = *new_leaf; - for (level, sibling) in proof.iter().enumerate() { - changelog_entry.path[level] = Some(current_node); - current_node = compute_parent_node::(¤t_node, sibling, leaf_index, level)?; - } - - self.inc_sequence_number()?; - - self.roots.push(current_node); - - // Check if the leaf is the last leaf in the tree. - if self.next_index() < (1 << self.height) { - changelog_entry.update_proof(self.next_index(), &mut self.filled_subtrees)?; - // Check if we updated the rightmost leaf. - if leaf_index >= self.current_index() { - self.set_rightmost_leaf(new_leaf); - } - } - self.changelog.push(changelog_entry); - - if self.canopy_depth > 0 { - self.update_canopy(self.changelog.last_index(), 1); - } - - Ok((self.changelog.last_index(), self.sequence_number())) - } - - /// Replaces the `old_leaf` under the `leaf_index` with a `new_leaf`, using - /// the given `proof` and `changelog_index` (pointing to the changelog entry - /// which was the newest at the time of preparing the proof). 
- #[inline(never)] - pub fn update( - &mut self, - changelog_index: usize, - old_leaf: &[u8; 32], - new_leaf: &[u8; 32], - leaf_index: usize, - proof: &mut BoundedVec<[u8; 32]>, - ) -> Result<(usize, usize), ConcurrentMerkleTreeError> { - let expected_proof_len = self.height - self.canopy_depth; - if proof.len() != expected_proof_len { - return Err(ConcurrentMerkleTreeError::InvalidProofLength( - expected_proof_len, - proof.len(), - )); - } - if leaf_index >= self.next_index() { - return Err(ConcurrentMerkleTreeError::CannotUpdateEmpty); - } - - if self.canopy_depth > 0 { - self.update_proof_from_canopy(leaf_index, proof)?; - } - if changelog_index != self.changelog_index() { - self.update_proof_from_changelog(changelog_index, leaf_index, proof)?; - } - self.validate_proof(old_leaf, leaf_index, proof)?; - self.update_leaf_in_tree(new_leaf, leaf_index, proof) - } - - /// Appends a new leaf to the tree. - pub fn append(&mut self, leaf: &[u8; 32]) -> Result<(usize, usize), ConcurrentMerkleTreeError> { - self.append_batch(&[leaf]) - } - - /// Appends a new leaf to the tree. Saves Merkle proof to the provided - /// `proof` reference. - pub fn append_with_proof( - &mut self, - leaf: &[u8; 32], - proof: &mut BoundedVec<[u8; 32]>, - ) -> Result<(usize, usize), ConcurrentMerkleTreeError> { - self.append_batch_with_proofs(&[leaf], &mut [proof]) - } - - /// Appends a batch of new leaves to the tree. - pub fn append_batch( - &mut self, - leaves: &[&[u8; 32]], - ) -> Result<(usize, usize), ConcurrentMerkleTreeError> { - self.append_batch_common::(leaves, None) - } - - /// Appends a batch of new leaves to the tree. Saves Merkle proofs to the - /// provided `proofs` slice. - pub fn append_batch_with_proofs( - &mut self, - leaves: &[&[u8; 32]], - proofs: &mut [&mut BoundedVec<[u8; 32]>], - ) -> Result<(usize, usize), ConcurrentMerkleTreeError> { - self.append_batch_common::(leaves, Some(proofs)) - } - - /// Appends a batch of new leaves to the tree. - /// - /// This method contains the common logic and is not intended for external - /// use. Callers should choose between [`append_batch`](ConcurrentMerkleTree::append_batch) - /// and [`append_batch_with_proofs`](ConcurrentMerkleTree::append_batch_with_proofs). - fn append_batch_common< - // The only purpose of this const generic is to force compiler to - // produce separate functions, with and without proof. - // - // Unfortunately, using `Option` is not enough: - // - // https://godbolt.org/z/fEMMfMdPc - // https://godbolt.org/z/T3dxnjMzz - // - // Using the const generic helps and ends up generating two separate - // functions: - // - // https://godbolt.org/z/zGnM7Ycn1 - const WITH_PROOFS: bool, - >( - &mut self, - leaves: &[&[u8; 32]], - // Slice for saving Merkle proofs. - // - // Currently it's used only for indexed Merkle trees. 
- mut proofs: Option<&mut [&mut BoundedVec<[u8; 32]>]>, - ) -> Result<(usize, usize), ConcurrentMerkleTreeError> { - if leaves.is_empty() { - return Err(ConcurrentMerkleTreeError::EmptyLeaves); - } - if (self.next_index() + leaves.len() - 1) >= 1 << self.height { - return Err(ConcurrentMerkleTreeError::TreeFull); - } - if leaves.len() > self.changelog.capacity() { - return Err(ConcurrentMerkleTreeError::BatchGreaterThanChangelog( - leaves.len(), - self.changelog.capacity(), - )); - } - - let first_changelog_index = (self.changelog.last_index() + 1) % self.changelog.capacity(); - let first_sequence_number = self.sequence_number() + 1; - - for (leaf_i, leaf) in leaves.iter().enumerate() { - let mut current_index = self.next_index(); - - self.changelog - .push(ChangelogEntry::::default_with_index(current_index)); - let changelog_index = self.changelog_index(); - - let mut current_node = **leaf; - - self.changelog[changelog_index].path[0] = Some(**leaf); - - for i in 0..self.height { - let is_left = current_index % 2 == 0; - - if is_left { - // If the current node is on the left side: - // - // U - // / \ - // CUR SIB - // / \ - // N N - // - // * The sibling (on the right) is a "zero node". - // * That "zero node" becomes a part of Merkle proof. - // * The upper (next current) node is `H(cur, Ø)`. - let empty_node = H::zero_bytes()[i]; - - if WITH_PROOFS { - // PANICS: `proofs` should be always `Some` at this point. - proofs.as_mut().unwrap()[leaf_i].push(empty_node)?; - } - - self.filled_subtrees[i] = current_node; - - // For all non-terminal leaves, stop computing parents as - // soon as we are on the left side. - // Computation of the parent nodes is going to happen in - // the next iterations. - if leaf_i < leaves.len() - 1 { - break; - } - - current_node = H::hashv(&[¤t_node, &empty_node])?; - } else { - // If the current node is on the right side: - // - // U - // / \ - // SIB CUR - // / \ - // N N - // * The sigling on the left is a "filled subtree". - // * That "filled subtree" becomes a part of Merkle proof. - // * The upper (next current) node is `H(sib, cur)`. - - if WITH_PROOFS { - // PANICS: `proofs` should be always `Some` at this point. - proofs.as_mut().unwrap()[leaf_i].push(self.filled_subtrees[i])?; - } - - current_node = H::hashv(&[&self.filled_subtrees[i], ¤t_node])?; - } - - if i < self.height - 1 { - self.changelog[changelog_index].path[i + 1] = Some(current_node); - } - - current_index /= 2; - } - - if leaf_i == leaves.len() - 1 { - self.roots.push(current_node); - } else { - // Photon returns only the sequence number and we use it in the - // JS client and forester to derive the root index. Therefore, - // we need to emit a "zero root" to not break that property. 
- self.roots.push([0u8; 32]); - } - - self.inc_next_index()?; - self.inc_sequence_number()?; - - self.set_rightmost_leaf(leaf); - } - - if self.canopy_depth > 0 { - self.update_canopy(first_changelog_index, leaves.len()); - } - - Ok((first_changelog_index, first_sequence_number)) - } - - fn update_canopy(&mut self, first_changelog_index: usize, num_leaves: usize) { - for i in 0..num_leaves { - println!("canopy: {:?}", self.canopy); - let changelog_index = (first_changelog_index + i) % self.changelog.capacity(); - for (i, path_node) in self.changelog[changelog_index] - .path - .iter() - .rev() - .take(self.canopy_depth) - .enumerate() - { - if let Some(path_node) = path_node { - let level = self.height - i - 1; - let index = (1 << (self.height - level)) - + (self.changelog[changelog_index].index >> level); - // `index - 2` maps to the canopy index. - self.canopy[(index - 2) as usize] = *path_node; - } - } - } - } -} - -impl Drop for ConcurrentMerkleTree -where - H: Hasher, -{ - fn drop(&mut self) { - let layout = Layout::new::(); - unsafe { alloc::dealloc(self.next_index as *mut u8, layout) }; - - let layout = Layout::new::(); - unsafe { alloc::dealloc(self.sequence_number as *mut u8, layout) }; - - let layout = Layout::new::<[u8; 32]>(); - unsafe { alloc::dealloc(self.rightmost_leaf as *mut u8, layout) }; - } -} - -impl PartialEq for ConcurrentMerkleTree -where - H: Hasher, -{ - fn eq(&self, other: &Self) -> bool { - self.height.eq(&other.height) - && self.canopy_depth.eq(&other.canopy_depth) - && self.next_index().eq(&other.next_index()) - && self.sequence_number().eq(&other.sequence_number()) - && self.rightmost_leaf().eq(&other.rightmost_leaf()) - && self - .filled_subtrees - .as_slice() - .eq(other.filled_subtrees.as_slice()) - && self.changelog.iter().eq(other.changelog.iter()) - && self.roots.iter().eq(other.roots.iter()) - && self.canopy.as_slice().eq(other.canopy.as_slice()) - } -} diff --git a/concurrent/src/zero_copy.rs b/concurrent/src/zero_copy.rs deleted file mode 100644 index c728a57..0000000 --- a/concurrent/src/zero_copy.rs +++ /dev/null @@ -1,369 +0,0 @@ -use std::{ - marker::PhantomData, - mem, - ops::{Deref, DerefMut}, -}; - -use light_bounded_vec::{ - BoundedVec, BoundedVecMetadata, CyclicBoundedVec, CyclicBoundedVecMetadata, -}; -use light_hasher::Hasher; -use light_utils::offset::zero_copy::{read_array_like_ptr_at, read_ptr_at, write_at}; -use memoffset::{offset_of, span_of}; - -use crate::{errors::ConcurrentMerkleTreeError, ConcurrentMerkleTree}; - -#[derive(Debug)] -pub struct ConcurrentMerkleTreeZeroCopy<'a, H, const HEIGHT: usize> -where - H: Hasher, -{ - merkle_tree: mem::ManuallyDrop>, - // The purpose of this field is ensuring that the wrapper does not outlive - // the buffer. 
- _bytes: &'a [u8], -} - -impl<'a, H, const HEIGHT: usize> ConcurrentMerkleTreeZeroCopy<'a, H, HEIGHT> -where - H: Hasher, -{ - pub fn struct_from_bytes_zero_copy( - bytes: &'a [u8], - ) -> Result<(ConcurrentMerkleTree, usize), ConcurrentMerkleTreeError> { - let expected_size = ConcurrentMerkleTree::::non_dyn_fields_size(); - if bytes.len() < expected_size { - return Err(ConcurrentMerkleTreeError::BufferSize( - expected_size, - bytes.len(), - )); - } - - let height = usize::from_le_bytes( - bytes[span_of!(ConcurrentMerkleTree, height)] - .try_into() - .unwrap(), - ); - let canopy_depth = usize::from_le_bytes( - bytes[span_of!(ConcurrentMerkleTree, canopy_depth)] - .try_into() - .unwrap(), - ); - - let mut offset = offset_of!(ConcurrentMerkleTree, next_index); - - let next_index = unsafe { read_ptr_at(bytes, &mut offset) }; - let sequence_number = unsafe { read_ptr_at(bytes, &mut offset) }; - let rightmost_leaf = unsafe { read_ptr_at(bytes, &mut offset) }; - let filled_subtrees_metadata = unsafe { read_ptr_at(bytes, &mut offset) }; - let changelog_metadata: *mut CyclicBoundedVecMetadata = - unsafe { read_ptr_at(bytes, &mut offset) }; - let roots_metadata: *mut CyclicBoundedVecMetadata = - unsafe { read_ptr_at(bytes, &mut offset) }; - let canopy_metadata = unsafe { read_ptr_at(bytes, &mut offset) }; - - let expected_size = ConcurrentMerkleTree::::size_in_account( - height, - unsafe { (*changelog_metadata).capacity() }, - unsafe { (*roots_metadata).capacity() }, - canopy_depth, - ); - if bytes.len() < expected_size { - return Err(ConcurrentMerkleTreeError::BufferSize( - expected_size, - bytes.len(), - )); - } - - let filled_subtrees = unsafe { - BoundedVec::from_raw_parts( - filled_subtrees_metadata, - read_array_like_ptr_at(bytes, &mut offset, height), - ) - }; - let changelog = unsafe { - CyclicBoundedVec::from_raw_parts( - changelog_metadata, - read_array_like_ptr_at(bytes, &mut offset, (*changelog_metadata).capacity()), - ) - }; - let roots = unsafe { - CyclicBoundedVec::from_raw_parts( - roots_metadata, - read_array_like_ptr_at(bytes, &mut offset, (*roots_metadata).capacity()), - ) - }; - let canopy = unsafe { - BoundedVec::from_raw_parts( - canopy_metadata, - read_array_like_ptr_at(bytes, &mut offset, (*canopy_metadata).capacity()), - ) - }; - - let merkle_tree = ConcurrentMerkleTree { - height, - canopy_depth, - next_index, - sequence_number, - rightmost_leaf, - filled_subtrees, - changelog, - roots, - canopy, - _hasher: PhantomData, - }; - merkle_tree.check_size_constraints()?; - - Ok((merkle_tree, offset)) - } - - pub fn from_bytes_zero_copy(bytes: &'a [u8]) -> Result { - let (merkle_tree, _) = Self::struct_from_bytes_zero_copy(bytes)?; - merkle_tree.check_size_constraints()?; - - Ok(Self { - merkle_tree: mem::ManuallyDrop::new(merkle_tree), - _bytes: bytes, - }) - } -} - -impl Deref for ConcurrentMerkleTreeZeroCopy<'_, H, HEIGHT> -where - H: Hasher, -{ - type Target = ConcurrentMerkleTree; - - fn deref(&self) -> &Self::Target { - &self.merkle_tree - } -} - -impl Drop for ConcurrentMerkleTreeZeroCopy<'_, H, HEIGHT> -where - H: Hasher, -{ - fn drop(&mut self) { - // SAFETY: Don't do anything here! Why? - // - // * Primitive fields of `ConcurrentMerkleTree` implement `Copy`, - // therefore `drop()` has no effect on them - Rust drops them when - // they go out of scope. - // * Don't drop the dynamic fields (`filled_subtrees`, `roots` etc.). In - // `ConcurrentMerkleTreeZeroCopy`, they are backed by buffers provided - // by the caller. 
These buffers are going to be eventually deallocated. - // Performing an another `drop()` here would result double `free()` - // which would result in aborting the program (either with `SIGABRT` - // or `SIGSEGV`). - } -} - -#[derive(Debug)] -pub struct ConcurrentMerkleTreeZeroCopyMut<'a, H, const HEIGHT: usize>( - ConcurrentMerkleTreeZeroCopy<'a, H, HEIGHT>, -) -where - H: Hasher; - -impl<'a, H, const HEIGHT: usize> ConcurrentMerkleTreeZeroCopyMut<'a, H, HEIGHT> -where - H: Hasher, -{ - pub fn from_bytes_zero_copy_mut( - bytes: &'a mut [u8], - ) -> Result { - Ok(Self(ConcurrentMerkleTreeZeroCopy::from_bytes_zero_copy( - bytes, - )?)) - } - - pub fn fill_non_dyn_fields_in_buffer( - bytes: &mut [u8], - height: usize, - canopy_depth: usize, - changelog_capacity: usize, - roots_capacity: usize, - ) -> Result { - let expected_size = ConcurrentMerkleTree::::size_in_account( - height, - changelog_capacity, - roots_capacity, - canopy_depth, - ); - if bytes.len() < expected_size { - return Err(ConcurrentMerkleTreeError::BufferSize( - expected_size, - bytes.len(), - )); - } - - bytes[span_of!(ConcurrentMerkleTree, height)] - .copy_from_slice(&height.to_le_bytes()); - bytes[span_of!(ConcurrentMerkleTree, canopy_depth)] - .copy_from_slice(&canopy_depth.to_le_bytes()); - - let mut offset = offset_of!(ConcurrentMerkleTree, next_index); - // next_index - write_at::(bytes, &0_usize.to_le_bytes(), &mut offset); - // sequence_number - write_at::(bytes, &0_usize.to_le_bytes(), &mut offset); - // rightmost_leaf - write_at::<[u8; 32]>(bytes, &H::zero_bytes()[0], &mut offset); - // filled_subtrees (metadata) - let filled_subtrees_metadata = BoundedVecMetadata::new(height); - write_at::(bytes, &filled_subtrees_metadata.to_le_bytes(), &mut offset); - // changelog (metadata) - let changelog_metadata = CyclicBoundedVecMetadata::new(changelog_capacity); - write_at::(bytes, &changelog_metadata.to_le_bytes(), &mut offset); - // roots (metadata) - let roots_metadata = CyclicBoundedVecMetadata::new(roots_capacity); - write_at::(bytes, &roots_metadata.to_le_bytes(), &mut offset); - // canopy (metadata) - let canopy_size = ConcurrentMerkleTree::::canopy_size(canopy_depth); - let canopy_metadata = BoundedVecMetadata::new(canopy_size); - write_at::(bytes, &canopy_metadata.to_le_bytes(), &mut offset); - - Ok(offset) - } - - pub fn from_bytes_zero_copy_init( - bytes: &'a mut [u8], - height: usize, - canopy_depth: usize, - changelog_capacity: usize, - roots_capacity: usize, - ) -> Result { - Self::fill_non_dyn_fields_in_buffer( - bytes, - height, - canopy_depth, - changelog_capacity, - roots_capacity, - )?; - Self::from_bytes_zero_copy_mut(bytes) - } -} - -impl Deref for ConcurrentMerkleTreeZeroCopyMut<'_, H, HEIGHT> -where - H: Hasher, -{ - type Target = ConcurrentMerkleTree; - - fn deref(&self) -> &Self::Target { - &self.0.merkle_tree - } -} -impl DerefMut for ConcurrentMerkleTreeZeroCopyMut<'_, H, HEIGHT> -where - H: Hasher, -{ - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0.merkle_tree - } -} - -#[cfg(test)] -mod test { - use super::*; - - use ark_bn254::Fr; - use ark_ff::{BigInteger, PrimeField, UniformRand}; - use light_hasher::Poseidon; - use rand::{thread_rng, Rng}; - - fn load_from_bytes< - const HEIGHT: usize, - const CHANGELOG: usize, - const ROOTS: usize, - const CANOPY_DEPTH: usize, - const OPERATIONS: usize, - >() { - let mut mt_1 = - ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY_DEPTH) - .unwrap(); - mt_1.init().unwrap(); - - // Create a buffer with random bytes - the `*_init` 
method should - // initialize the buffer gracefully and the randomness shouldn't cause - // undefined behavior. - let mut bytes = vec![ - 0u8; - ConcurrentMerkleTree::::size_in_account( - HEIGHT, - CHANGELOG, - ROOTS, - CANOPY_DEPTH - ) - ]; - thread_rng().fill(bytes.as_mut_slice()); - - // Initialize a Merkle tree on top of a byte slice. - { - let mut mt = - ConcurrentMerkleTreeZeroCopyMut::::from_bytes_zero_copy_init( - bytes.as_mut_slice(), - HEIGHT, - CANOPY_DEPTH, - CHANGELOG, - ROOTS, - ) - .unwrap(); - mt.init().unwrap(); - - // Ensure that it was properly initialized. - assert_eq!(mt.height, HEIGHT); - assert_eq!(mt.canopy_depth, CANOPY_DEPTH,); - assert_eq!(mt.next_index(), 0); - assert_eq!(mt.sequence_number(), 0); - assert_eq!(mt.rightmost_leaf(), Poseidon::zero_bytes()[0]); - - assert_eq!(mt.filled_subtrees.capacity(), HEIGHT); - assert_eq!(mt.filled_subtrees.len(), HEIGHT); - - assert_eq!(mt.changelog.capacity(), CHANGELOG); - assert_eq!(mt.changelog.len(), 1); - - assert_eq!(mt.roots.capacity(), ROOTS); - assert_eq!(mt.roots.len(), 1); - - assert_eq!( - mt.canopy.capacity(), - ConcurrentMerkleTree::::canopy_size(CANOPY_DEPTH) - ); - - assert_eq!(mt.root(), Poseidon::zero_bytes()[HEIGHT]); - } - - let mut rng = thread_rng(); - - for _ in 0..OPERATIONS { - // Reload the tree from bytes on each iteration. - let mut mt_2 = - ConcurrentMerkleTreeZeroCopyMut::::from_bytes_zero_copy_mut( - &mut bytes, - ) - .unwrap(); - - let leaf: [u8; 32] = Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - mt_1.append(&leaf).unwrap(); - mt_2.append(&leaf).unwrap(); - - assert_eq!(mt_1, *mt_2); - } - } - - #[test] - fn test_load_from_bytes_22_256_256_0_1024() { - load_from_bytes::<22, 256, 256, 0, 1024>() - } - - #[test] - fn test_load_from_bytes_22_256_256_10_1024() { - load_from_bytes::<22, 256, 256, 10, 1024>() - } -} diff --git a/concurrent/tests/tests.rs b/concurrent/tests/tests.rs deleted file mode 100644 index f409dec..0000000 --- a/concurrent/tests/tests.rs +++ /dev/null @@ -1,3421 +0,0 @@ -use ark_bn254::Fr; -use ark_ff::{BigInteger, PrimeField, UniformRand}; -use light_bounded_vec::{BoundedVec, BoundedVecError, CyclicBoundedVec}; -use light_concurrent_merkle_tree::{ - changelog::{ChangelogEntry, ChangelogPath}, - errors::ConcurrentMerkleTreeError, - zero_copy::ConcurrentMerkleTreeZeroCopyMut, - ConcurrentMerkleTree, -}; -use light_hash_set::HashSet; -use light_hasher::{Hasher, Keccak, Poseidon, Sha256}; -use light_utils::rand::gen_range_exclude; -use num_bigint::BigUint; -use num_traits::FromBytes; -use rand::{rngs::ThreadRng, seq::SliceRandom, thread_rng, Rng}; -use std::cmp; - -/// Tests whether append operations work as expected. -fn append() -where - H: Hasher, -{ - const HEIGHT: usize = 4; - const CHANGELOG: usize = 32; - const ROOTS: usize = 256; - - let mut merkle_tree = - ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY).unwrap(); - merkle_tree.init().unwrap(); - - let leaf1 = H::hash(&[1u8; 32]).unwrap(); - - // The hash of our new leaf and its sibling (a zero value). - // - // H1 - // / \ - // L1 Z[0] - let h1 = H::hashv(&[&leaf1, &H::zero_bytes()[0]]).unwrap(); - - // The hash of `h1` and its sibling (a subtree represented by `Z[1]`). - // - // H2 - // /-/ \-\ - // H1 Z[1] - // / \ / \ - // L1 Z[0] Z[0] Z[0] - // - // `Z[1]` represents the whole subtree on the right from `h2`. In the next - // examples, we are just going to show empty subtrees instead of the whole - // hierarchy. 
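
The `Z[k]` values in these walkthroughs are just the roots of empty subtrees: `Z[0]` is the empty-leaf value and `Z[k+1] = H(Z[k], Z[k])`. A minimal, self-contained sketch of that construction and of the single-leaf root `R = H(H(H(H(L1, Z[0]), Z[1]), Z[2]), Z[3])`, assuming an all-zero empty leaf and using the `sha2` crate as a stand-in for the tree's `Hasher` implementations (the real `zero_bytes()` constants are hasher-specific):

```rust
// Assumed dependency: sha2 = "0.10" (stand-in for the Keccak/Poseidon/Sha256 hashers).
use sha2::{Digest, Sha256};

/// Stand-in for `H::hashv(&[left, right])`.
fn hash_pair(left: &[u8; 32], right: &[u8; 32]) -> [u8; 32] {
    let mut hasher = Sha256::new();
    hasher.update(left);
    hasher.update(right);
    let mut out = [0u8; 32];
    out.copy_from_slice(&hasher.finalize());
    out
}

fn main() {
    const HEIGHT: usize = 4;

    // Z[0] is the empty leaf; Z[k + 1] is the root of an empty subtree of
    // height k + 1, i.e. the hash of two empty subtrees of height k.
    let mut zero = [[0u8; 32]; HEIGHT + 1];
    for k in 0..HEIGHT {
        zero[k + 1] = hash_pair(&zero[k], &zero[k]);
    }

    // Root after appending a single leaf L1 at index 0, exactly as in the
    // walkthrough: H1 = H(L1, Z[0]), H2 = H(H1, Z[1]), H3 = H(H2, Z[2]),
    // R = H(H3, Z[3]).
    let mut leaf1 = [0u8; 32];
    leaf1.copy_from_slice(&Sha256::digest([1u8; 32]));

    let mut node = leaf1;
    for level in 0..HEIGHT {
        node = hash_pair(&node, &zero[level]);
    }
    println!("root with one appended leaf: {:02x?}", node);
}
```
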
- let h2 = H::hashv(&[&h1, &H::zero_bytes()[1]]).unwrap(); - - // The hash of `h3` and its sibling (a subtree represented by `Z[2]`). - // - // H3 - // / \ - // H2 Z[2] - // / \ - // H1 Z[1] - // / \ - // L1 Z[0] - let h3 = H::hashv(&[&h2, &H::zero_bytes()[2]]).unwrap(); - - // The hash of `h4` and its sibling (a subtree represented by `Z[3]`), - // which is the root. - // - // R - // / \ - // H3 Z[3] - // / \ - // H2 Z[2] - // / \ - // H1 Z[1] - // / \ - // L1 Z[0] - let expected_root = H::hashv(&[&h3, &H::zero_bytes()[3]]).unwrap(); - let expected_changelog_path = ChangelogPath([Some(leaf1), Some(h1), Some(h2), Some(h3)]); - let expected_filled_subtrees = BoundedVec::from_array(&[leaf1, h1, h2, h3]); - - merkle_tree.append(&leaf1).unwrap(); - - assert_eq!(merkle_tree.changelog_index(), 1); - assert_eq!( - merkle_tree.changelog[merkle_tree.changelog_index()], - ChangelogEntry::new(expected_changelog_path, 0) - ); - assert_eq!(merkle_tree.root(), expected_root); - assert_eq!(merkle_tree.roots.last_index(), 1); - assert_eq!(merkle_tree.filled_subtrees, expected_filled_subtrees); - assert_eq!(merkle_tree.next_index(), 1); - assert_eq!(merkle_tree.rightmost_leaf(), leaf1); - - // Appending the 2nd leaf should result in recomputing the root due to the - // change of the `h1`, which now is a hash of the two non-zero leafs. So - // when computing hashes from H2 up to the root, we are still going to use - // zero bytes. - // - // The other subtrees still remain the same. - // - // R - // / \ - // H3 Z[3] - // / \ - // H2 Z[2] - // / \ - // H1 Z[1] - // / \ - // L1 L2 - let leaf2 = H::hash(&[2u8; 32]).unwrap(); - - let h1 = H::hashv(&[&leaf1, &leaf2]).unwrap(); - let h2 = H::hashv(&[&h1, &H::zero_bytes()[1]]).unwrap(); - let h3 = H::hashv(&[&h2, &H::zero_bytes()[2]]).unwrap(); - let expected_root = H::hashv(&[&h3, &H::zero_bytes()[3]]).unwrap(); - let expected_changelog_path = ChangelogPath([Some(leaf2), Some(h1), Some(h2), Some(h3)]); - let expected_filled_subtrees = BoundedVec::from_array(&[leaf1, h1, h2, h3]); - - merkle_tree.append(&leaf2).unwrap(); - - assert_eq!(merkle_tree.changelog_index(), 2); - assert_eq!( - merkle_tree.changelog[merkle_tree.changelog_index()], - ChangelogEntry::new(expected_changelog_path, 1), - ); - assert_eq!(merkle_tree.root(), expected_root); - assert_eq!(merkle_tree.roots.last_index(), 2); - assert_eq!(merkle_tree.filled_subtrees, expected_filled_subtrees); - assert_eq!(merkle_tree.next_index(), 2); - assert_eq!(merkle_tree.rightmost_leaf(), leaf2); - - // Appending the 3rd leaf alters the next subtree on the right. - // Instead of using Z[1], we will end up with the hash of the new leaf and - // Z[0]. - // - // The other subtrees still remain the same. 
- // - // R - // / \ - // H4 Z[3] - // / \ - // H3 Z[2] - // / \ - // H1 H2 - // / \ / \ - // L1 L2 L3 Z[0] - let leaf3 = H::hash(&[3u8; 32]).unwrap(); - - let h1 = H::hashv(&[&leaf1, &leaf2]).unwrap(); - let h2 = H::hashv(&[&leaf3, &H::zero_bytes()[0]]).unwrap(); - let h3 = H::hashv(&[&h1, &h2]).unwrap(); - let h4 = H::hashv(&[&h3, &H::zero_bytes()[2]]).unwrap(); - let expected_root = H::hashv(&[&h4, &H::zero_bytes()[3]]).unwrap(); - let expected_changelog_path = ChangelogPath([Some(leaf3), Some(h2), Some(h3), Some(h4)]); - let expected_filled_subtrees = BoundedVec::from_array(&[leaf3, h1, h3, h4]); - - merkle_tree.append(&leaf3).unwrap(); - - assert_eq!(merkle_tree.changelog_index(), 3); - assert_eq!( - merkle_tree.changelog[merkle_tree.changelog_index()], - ChangelogEntry::new(expected_changelog_path, 2), - ); - assert_eq!(merkle_tree.root(), expected_root); - assert_eq!(merkle_tree.roots.last_index(), 3); - assert_eq!(merkle_tree.filled_subtrees, expected_filled_subtrees); - assert_eq!(merkle_tree.next_index(), 3); - assert_eq!(merkle_tree.rightmost_leaf(), leaf3); - - // Appending the 4th leaf alters the next subtree on the right. - // Instead of using Z[1], we will end up with the hash of the new leaf and - // Z[0]. - // - // The other subtrees still remain the same. - // - // R - // / \ - // H4 Z[3] - // / \ - // H3 Z[2] - // / \ - // H1 H2 - // / \ / \ - // L1 L2 L3 L4 - let leaf4 = H::hash(&[4u8; 32]).unwrap(); - - let h1 = H::hashv(&[&leaf1, &leaf2]).unwrap(); - let h2 = H::hashv(&[&leaf3, &leaf4]).unwrap(); - let h3 = H::hashv(&[&h1, &h2]).unwrap(); - let h4 = H::hashv(&[&h3, &H::zero_bytes()[2]]).unwrap(); - let expected_root = H::hashv(&[&h4, &H::zero_bytes()[3]]).unwrap(); - let expected_changelog_path = ChangelogPath([Some(leaf4), Some(h2), Some(h3), Some(h4)]); - let expected_filled_subtrees = BoundedVec::from_array(&[leaf3, h1, h3, h4]); - - merkle_tree.append(&leaf4).unwrap(); - - assert_eq!(merkle_tree.changelog_index(), 4); - assert_eq!( - merkle_tree.changelog[merkle_tree.changelog_index()], - ChangelogEntry::new(expected_changelog_path, 3), - ); - assert_eq!(merkle_tree.root(), expected_root); - assert_eq!(merkle_tree.roots.last_index(), 4); - assert_eq!(merkle_tree.filled_subtrees, expected_filled_subtrees); - assert_eq!(merkle_tree.next_index(), 4); - assert_eq!(merkle_tree.rightmost_leaf(), leaf4); -} - -/// Checks whether `append_with_proof` returns correct Merkle proofs. -fn append_with_proof< - H, - const HEIGHT: usize, - const CHANGELOG: usize, - const ROOTS: usize, - const CANOPY: usize, - const N_APPENDS: usize, ->() -where - H: Hasher, -{ - let mut merkle_tree = - ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY).unwrap(); - merkle_tree.init().unwrap(); - - let mut reference_tree = light_merkle_tree_reference::MerkleTree::::new(HEIGHT, CANOPY); - - let mut rng = thread_rng(); - - for i in 0..N_APPENDS { - let leaf: [u8; 32] = Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - let mut proof = BoundedVec::with_capacity(HEIGHT); - merkle_tree.append_with_proof(&leaf, &mut proof).unwrap(); - reference_tree.append(&leaf).unwrap(); - - let reference_proof = reference_tree.get_proof_of_leaf(i, true).unwrap(); - - assert_eq!(proof, reference_proof); - } -} - -/// Performs invalid updates on the given Merkle tree by trying to swap all -/// parameters separately. Asserts the errors that the Merkle tree should -/// return as a part of validation of these inputs. 
-fn invalid_updates( - rng: &mut ThreadRng, - merkle_tree: &mut ConcurrentMerkleTree, - changelog_index: usize, - old_leaf: &[u8; 32], - new_leaf: &[u8; 32], - leaf_index: usize, - proof: BoundedVec<[u8; 32]>, -) where - H: Hasher, -{ - // This test case works only for larger changelogs, where there is a chance - // to encounter conflicting changelog entries. - // - // We assume that it's going to work for changelogs with capacity greater - // than 1. But the smaller the changelog and the more non-conflicting - // operations are done in between, the higher the chance of this check - // failing. If you ever encounter issues with reproducing this error, try - // tuning your changelog size or make sure that conflicting operations are - // done frequently enough. - if CHANGELOG > 1 { - let invalid_changelog_index = 0; - let mut proof_clone = proof.clone(); - let res = merkle_tree.update( - invalid_changelog_index, - old_leaf, - new_leaf, - leaf_index, - &mut proof_clone, - ); - assert!(matches!( - res, - Err(ConcurrentMerkleTreeError::CannotUpdateLeaf) - )); - } - - let invalid_old_leaf: [u8; 32] = Fr::rand(rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - let mut proof_clone = proof.clone(); - let res = merkle_tree.update( - changelog_index, - &invalid_old_leaf, - new_leaf, - 0, - &mut proof_clone, - ); - assert!(matches!( - res, - Err(ConcurrentMerkleTreeError::InvalidProof(_, _)) - )); - - let invalid_index_in_range = gen_range_exclude(rng, 0..merkle_tree.next_index(), &[leaf_index]); - let mut proof_clone = proof.clone(); - let res = merkle_tree.update( - changelog_index, - old_leaf, - new_leaf, - invalid_index_in_range, - &mut proof_clone, - ); - assert!(matches!( - res, - Err(ConcurrentMerkleTreeError::InvalidProof(_, _)) - )); - - // Try pointing to the leaf indices outside the range only if the tree is - // not full. Otherwise, it doesn't make sense and even `gen_range` will - // fail. - let next_index = merkle_tree.next_index(); - let limit_leaves = 1 << HEIGHT; - if next_index < limit_leaves { - let invalid_index_outside_range = rng.gen_range(next_index..limit_leaves); - let mut proof_clone = proof.clone(); - let res = merkle_tree.update( - changelog_index, - old_leaf, - new_leaf, - invalid_index_outside_range, - &mut proof_clone, - ); - assert!(matches!( - res, - Err(ConcurrentMerkleTreeError::CannotUpdateEmpty) - )); - } -} - -/// Tests whether update operations work as expected. -fn update() -where - H: Hasher, -{ - const HEIGHT: usize = 4; - - let mut merkle_tree = - ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY).unwrap(); - merkle_tree.init().unwrap(); - let mut reference_tree = light_merkle_tree_reference::MerkleTree::::new(HEIGHT, CANOPY); - - let mut rng = thread_rng(); - - let leaf1 = H::hash(&[1u8; 32]).unwrap(); - let leaf2 = H::hash(&[2u8; 32]).unwrap(); - let leaf3 = H::hash(&[3u8; 32]).unwrap(); - let leaf4 = H::hash(&[4u8; 32]).unwrap(); - - // Append 4 leaves. 
- // - // R - // / \ - // H4 Z[3] - // / \ - // H3 Z[2] - // / \ - // H1 H2 - // / \ / \ - // L1 L2 L3 L4 - let h1 = H::hashv(&[&leaf1, &leaf2]).unwrap(); - let h2 = H::hashv(&[&leaf3, &leaf4]).unwrap(); - let h3 = H::hashv(&[&h1, &h2]).unwrap(); - let h4 = H::hashv(&[&h3, &H::zero_bytes()[2]]).unwrap(); - let expected_root = H::hashv(&[&h4, &H::zero_bytes()[3]]).unwrap(); - let expected_changelog_path = ChangelogPath([Some(leaf4), Some(h2), Some(h3), Some(h4)]); - let expected_filled_subtrees = BoundedVec::from_array(&[leaf3, h1, h3, h4]); - - merkle_tree.append(&leaf1).unwrap(); - reference_tree.append(&leaf1).unwrap(); - merkle_tree.append(&leaf2).unwrap(); - reference_tree.append(&leaf2).unwrap(); - merkle_tree.append(&leaf3).unwrap(); - reference_tree.append(&leaf3).unwrap(); - merkle_tree.append(&leaf4).unwrap(); - reference_tree.append(&leaf4).unwrap(); - - let canopy_levels = [ - &[h4, H::zero_bytes()[3]][..], - &[ - h3, - H::zero_bytes()[2], - H::zero_bytes()[2], - H::zero_bytes()[2], - ][..], - ]; - let mut expected_canopy = Vec::new(); - - for canopy_level in canopy_levels[..CANOPY].iter() { - expected_canopy.extend_from_slice(canopy_level); - } - - assert_eq!(merkle_tree.changelog_index(), 4 % CHANGELOG); - assert_eq!( - merkle_tree.changelog[merkle_tree.changelog_index()], - ChangelogEntry::new(expected_changelog_path, 3), - ); - - assert_eq!(merkle_tree.root(), reference_tree.root()); - assert_eq!(merkle_tree.root(), expected_root); - assert_eq!(merkle_tree.roots.last_index(), 4); - assert_eq!(merkle_tree.filled_subtrees, expected_filled_subtrees); - assert_eq!(merkle_tree.next_index(), 4); - assert_eq!(merkle_tree.rightmost_leaf(), leaf4); - assert_eq!(merkle_tree.canopy, reference_tree.get_canopy().unwrap()); - assert_eq!(merkle_tree.canopy.as_slice(), expected_canopy.as_slice()); - - // Replace `leaf1`. - let new_leaf1 = [9u8; 32]; - - // Replacing L1 affects H1 and all parent hashes up to the root. 
- // - // R - // / \ - // *H4* Z[3] - // / \ - // *H3* Z[2] - // / \ - // *H1* H2 - // / \ / \ - // *L1* L2 L3 L4 - // - // Merkle proof for the replaced leaf L1 is: - // [L2, H2, Z[2], Z[3]] - let changelog_index = merkle_tree.changelog_index(); - - let proof_raw = &[leaf2, h2, H::zero_bytes()[2], H::zero_bytes()[3]]; - let mut proof = BoundedVec::with_capacity(HEIGHT); - for node in &proof_raw[..HEIGHT - CANOPY] { - proof.push(*node).unwrap(); - } - - invalid_updates::( - &mut rng, - &mut merkle_tree, - changelog_index, - &leaf1, - &new_leaf1, - 0, - proof.clone(), - ); - merkle_tree - .update(changelog_index, &leaf1, &new_leaf1, 0, &mut proof) - .unwrap(); - reference_tree.update(&new_leaf1, 0).unwrap(); - - let h1 = H::hashv(&[&new_leaf1, &leaf2]).unwrap(); - let h2 = H::hashv(&[&leaf3, &leaf4]).unwrap(); - let h3 = H::hashv(&[&h1, &h2]).unwrap(); - let h4 = H::hashv(&[&h3, &H::zero_bytes()[2]]).unwrap(); - let expected_root = H::hashv(&[&h4, &H::zero_bytes()[3]]).unwrap(); - let expected_changelog_path = ChangelogPath([Some(new_leaf1), Some(h1), Some(h3), Some(h4)]); - - let canopy_levels = [ - &[h4, H::zero_bytes()[3]][..], - &[ - h3, - H::zero_bytes()[2], - H::zero_bytes()[2], - H::zero_bytes()[2], - ][..], - ]; - let mut expected_canopy = Vec::new(); - for canopy_level in canopy_levels[..CANOPY].iter() { - expected_canopy.extend_from_slice(canopy_level); - } - - assert_eq!(merkle_tree.changelog_index(), 5 % CHANGELOG); - assert_eq!( - merkle_tree.changelog[merkle_tree.changelog_index()], - ChangelogEntry::new(expected_changelog_path, 0), - ); - - assert_eq!(merkle_tree.root(), reference_tree.root()); - assert_eq!(merkle_tree.root(), expected_root); - assert_eq!(merkle_tree.roots.last_index(), 5); - assert_eq!(merkle_tree.next_index(), 4); - assert_eq!(merkle_tree.rightmost_leaf(), leaf4); - assert_eq!(merkle_tree.canopy, reference_tree.get_canopy().unwrap()); - assert_eq!(merkle_tree.canopy.as_slice(), expected_canopy.as_slice()); - - // Replace `leaf2`. - let new_leaf2 = H::hash(&[8u8; 32]).unwrap(); - - // Replacing L2 affects H1 and all parent hashes up to the root. 
- // - // R - // / \ - // *H4* Z[3] - // / \ - // *H3* Z[2] - // / \ - // *H1* H2 - // / \ / \ - // L1 *L2* L3 L4 - // - // Merkle proof for the replaced leaf L2 is: - // [L1, H2, Z[2], Z[3]] - let changelog_index = merkle_tree.changelog_index(); - - let proof_raw = &[new_leaf1, h2, H::zero_bytes()[2], H::zero_bytes()[3]]; - let mut proof = BoundedVec::with_capacity(HEIGHT); - for node in &proof_raw[..HEIGHT - CANOPY] { - proof.push(*node).unwrap(); - } - - invalid_updates::( - &mut rng, - &mut merkle_tree, - changelog_index, - &leaf2, - &new_leaf2, - 1, - proof.clone(), - ); - merkle_tree - .update(changelog_index, &leaf2, &new_leaf2, 1, &mut proof) - .unwrap(); - reference_tree.update(&new_leaf2, 1).unwrap(); - - let h1 = H::hashv(&[&new_leaf1, &new_leaf2]).unwrap(); - let h2 = H::hashv(&[&leaf3, &leaf4]).unwrap(); - let h3 = H::hashv(&[&h1, &h2]).unwrap(); - let h4 = H::hashv(&[&h3, &H::zero_bytes()[2]]).unwrap(); - let expected_root = H::hashv(&[&h4, &H::zero_bytes()[3]]).unwrap(); - let expected_changelog_path = ChangelogPath([Some(new_leaf2), Some(h1), Some(h3), Some(h4)]); - - let canopy_levels = [ - &[h4, H::zero_bytes()[3]][..], - &[ - h3, - H::zero_bytes()[2], - H::zero_bytes()[2], - H::zero_bytes()[2], - ][..], - ]; - let mut expected_canopy = Vec::new(); - for canopy_level in canopy_levels[..CANOPY].iter() { - expected_canopy.extend_from_slice(canopy_level); - } - - assert_eq!(merkle_tree.changelog_index(), 6 % CHANGELOG); - assert_eq!( - merkle_tree.changelog[merkle_tree.changelog_index()], - ChangelogEntry::new(expected_changelog_path, 1), - ); - - assert_eq!(merkle_tree.root(), expected_root); - assert_eq!(merkle_tree.roots.last_index(), 6); - assert_eq!(merkle_tree.next_index(), 4); - assert_eq!(merkle_tree.rightmost_leaf(), leaf4); - assert_eq!(merkle_tree.canopy, reference_tree.get_canopy().unwrap()); - assert_eq!(merkle_tree.canopy.as_slice(), expected_canopy.as_slice()); - - // Replace `leaf3`. - let new_leaf3 = H::hash(&[7u8; 32]).unwrap(); - - // Replacing L3 affects H1 and all parent hashes up to the root. 
- // - // R - // / \ - // *H4* Z[3] - // / \ - // *H3* Z[2] - // / \ - // H1 *H2* - // / \ / \ - // L1 L2 *L3* L4 - // - // Merkle proof for the replaced leaf L3 is: - // [L4, H1, Z[2], Z[3]] - let changelog_index = merkle_tree.changelog_index(); - - let proof_raw = &[leaf4, h1, H::zero_bytes()[2], H::zero_bytes()[3]]; - let mut proof = BoundedVec::with_capacity(HEIGHT); - for node in &proof_raw[..HEIGHT - CANOPY] { - proof.push(*node).unwrap(); - } - - invalid_updates::( - &mut rng, - &mut merkle_tree, - changelog_index, - &leaf3, - &new_leaf3, - 2, - proof.clone(), - ); - merkle_tree - .update(changelog_index, &leaf3, &new_leaf3, 2, &mut proof) - .unwrap(); - reference_tree.update(&new_leaf3, 2).unwrap(); - - let h1 = H::hashv(&[&new_leaf1, &new_leaf2]).unwrap(); - let h2 = H::hashv(&[&new_leaf3, &leaf4]).unwrap(); - let h3 = H::hashv(&[&h1, &h2]).unwrap(); - let h4 = H::hashv(&[&h3, &H::zero_bytes()[2]]).unwrap(); - let expected_root = H::hashv(&[&h4, &H::zero_bytes()[3]]).unwrap(); - let expected_changelog_path = ChangelogPath([Some(new_leaf3), Some(h2), Some(h3), Some(h4)]); - - let canopy_levels = [ - &[h4, H::zero_bytes()[3]][..], - &[ - h3, - H::zero_bytes()[2], - H::zero_bytes()[2], - H::zero_bytes()[2], - ][..], - ]; - let mut expected_canopy = Vec::new(); - for canopy_level in canopy_levels[..CANOPY].iter() { - expected_canopy.extend_from_slice(canopy_level); - } - - assert_eq!(merkle_tree.changelog_index(), 7 % CHANGELOG); - assert_eq!( - merkle_tree.changelog[merkle_tree.changelog_index()], - ChangelogEntry::new(expected_changelog_path, 2) - ); - - assert_eq!(merkle_tree.root(), expected_root); - assert_eq!(merkle_tree.roots.last_index(), 7); - assert_eq!(merkle_tree.next_index(), 4); - assert_eq!(merkle_tree.rightmost_leaf(), leaf4); - assert_eq!(merkle_tree.canopy, reference_tree.get_canopy().unwrap()); - assert_eq!(merkle_tree.canopy.as_slice(), expected_canopy.as_slice()); - - // Replace `leaf4`. - let new_leaf4 = H::hash(&[6u8; 32]).unwrap(); - - // Replacing L4 affects H1 and all parent hashes up to the root. 
- // - // R - // / \ - // *H4* Z[3] - // / \ - // *H3* Z[2] - // / \ - // H1 *H2* - // / \ / \ - // L1 L2 L3 *L4* - // - // Merkle proof for the replaced leaf L4 is: - // [L3, H1, Z[2], Z[3]] - let changelog_index = merkle_tree.changelog_index(); - - let proof_raw = &[new_leaf3, h1, H::zero_bytes()[2], H::zero_bytes()[3]]; - let mut proof = BoundedVec::with_capacity(HEIGHT); - for node in &proof_raw[..HEIGHT - CANOPY] { - proof.push(*node).unwrap(); - } - - invalid_updates::( - &mut rng, - &mut merkle_tree, - changelog_index, - &leaf4, - &new_leaf4, - 3, - proof.clone(), - ); - merkle_tree - .update(changelog_index, &leaf4, &new_leaf4, 3, &mut proof) - .unwrap(); - reference_tree.update(&new_leaf4, 3).unwrap(); - - let h1 = H::hashv(&[&new_leaf1, &new_leaf2]).unwrap(); - let h2 = H::hashv(&[&new_leaf3, &new_leaf4]).unwrap(); - let h3 = H::hashv(&[&h1, &h2]).unwrap(); - let h4 = H::hashv(&[&h3, &H::zero_bytes()[2]]).unwrap(); - let expected_root = H::hashv(&[&h4, &H::zero_bytes()[3]]).unwrap(); - let expected_changelog_path = ChangelogPath([Some(new_leaf4), Some(h2), Some(h3), Some(h4)]); - - let canopy_levels = [ - &[h4, H::zero_bytes()[3]][..], - &[ - h3, - H::zero_bytes()[2], - H::zero_bytes()[2], - H::zero_bytes()[2], - ][..], - ]; - let mut expected_canopy = Vec::new(); - for canopy_level in canopy_levels[..CANOPY].iter() { - expected_canopy.extend_from_slice(canopy_level); - } - - assert_eq!(merkle_tree.changelog_index(), 8 % CHANGELOG); - assert_eq!( - merkle_tree.changelog[merkle_tree.changelog_index()], - ChangelogEntry::new(expected_changelog_path, 3) - ); - - assert_eq!(merkle_tree.root(), expected_root); - assert_eq!(merkle_tree.roots.last_index(), 8); - assert_eq!(merkle_tree.next_index(), 4); - assert_eq!(merkle_tree.rightmost_leaf(), new_leaf4); - assert_eq!(merkle_tree.canopy, reference_tree.get_canopy().unwrap()); - assert_eq!(merkle_tree.canopy.as_slice(), expected_canopy.as_slice()); -} - -/// Tests whether appending leaves over the limit results in an explicit error. -fn overfill_tree() -where - H: Hasher, -{ - const HEIGHT: usize = 2; - const CHANGELOG: usize = 32; - const ROOTS: usize = 32; - const CANOPY: usize = 0; - - let mut merkle_tree = - ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY).unwrap(); - merkle_tree.init().unwrap(); - - for _ in 0..4 { - merkle_tree.append(&[4; 32]).unwrap(); - } - assert!(matches!( - merkle_tree.append(&[4; 32]), - Err(ConcurrentMerkleTreeError::TreeFull) - )); -} - -/// Tests whether performing enough updates to overfill the changelog and root -/// buffer results in graceful reset of the counters. -fn overfill_changelog_and_roots() -where - H: Hasher, -{ - const HEIGHT: usize = 2; - const CHANGELOG: usize = 6; - const ROOTS: usize = 8; - const CANOPY: usize = 0; - - // Our implementation of concurrent Merkle tree. - let mut merkle_tree = - ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY).unwrap(); - merkle_tree.init().unwrap(); - - // Reference implementation of Merkle tree which Solana Labs uses for - // testing (and therefore, we as well). We use it mostly to get the Merkle - // proofs. - let mut reference_tree = light_merkle_tree_reference::MerkleTree::::new(HEIGHT, CANOPY); - - let mut rng = thread_rng(); - - // Fill up the tree, producing 4 roots and changelog entries. 
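
The counter resets asserted below follow from the changelog and root buffers being fixed-capacity ring buffers: each push advances a write index that wraps back to zero once the capacity is reached, overwriting the oldest entry. A toy stand-in for that behavior (not the actual `CyclicBoundedVec`), replaying the `CHANGELOG = 6` bookkeeping of this test, where `init` writes one entry and every append or update writes another:

```rust
/// Toy stand-in for a fixed-capacity cyclic vector: once full, the write
/// index wraps around and the oldest entry is overwritten.
struct Cyclic<T> {
    items: Vec<T>,
    capacity: usize,
    last_index: usize,
}

impl<T> Cyclic<T> {
    fn new(capacity: usize) -> Self {
        Self { items: Vec::with_capacity(capacity), capacity, last_index: 0 }
    }

    fn push(&mut self, value: T) {
        if self.items.len() < self.capacity {
            self.last_index = self.items.len();
            self.items.push(value);
        } else {
            self.last_index = (self.last_index + 1) % self.capacity;
            self.items[self.last_index] = value;
        }
    }
}

fn main() {
    // CHANGELOG = 6: one init entry plus four appends leave last_index at 4;
    // the first update fills the last slot (index 5) and the second wraps the
    // index back to 0, matching the assertions in `overfill_changelog_and_roots`.
    let mut changelog = Cyclic::new(6);
    for _ in 0..5 {
        changelog.push(());
    }
    assert_eq!(changelog.last_index, 4);
    changelog.push(());
    assert_eq!(changelog.last_index, 5);
    changelog.push(());
    assert_eq!(changelog.last_index, 0);
}
```
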
- for _ in 0..(1 << HEIGHT) { - let leaf: [u8; 32] = Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - merkle_tree.append(&leaf).unwrap(); - reference_tree.append(&leaf).unwrap(); - } - - assert_eq!(merkle_tree.changelog.last_index(), 4); - assert_eq!(merkle_tree.roots.last_index(), 4); - - // Update 2 leaves to fill up the changelog. Its counter should reach the - // modulus and get reset. - for i in 0..2 { - let new_leaf: [u8; 32] = Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - - let changelog_index = merkle_tree.changelog_index(); - let old_leaf = reference_tree.leaf(i); - let mut proof = reference_tree.get_proof_of_leaf(i, false).unwrap(); - - merkle_tree - .update(changelog_index, &old_leaf, &new_leaf, i, &mut proof) - .unwrap(); - reference_tree.update(&new_leaf, i).unwrap(); - } - - assert_eq!(merkle_tree.changelog.last_index(), 0); - assert_eq!(merkle_tree.roots.last_index(), 6); - - // Update another 2 leaves to fill up the root. Its counter should reach - // the modulus and get reset. The previously reset counter should get - // incremented. - for i in 0..2 { - let new_leaf: [u8; 32] = Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - - let changelog_index = merkle_tree.changelog_index(); - let old_leaf = reference_tree.leaf(i); - let mut proof = reference_tree.get_proof_of_leaf(i, false).unwrap(); - - merkle_tree - .update(changelog_index, &old_leaf, &new_leaf, i, &mut proof) - .unwrap(); - reference_tree.update(&new_leaf, i).unwrap(); - } - - assert_eq!(merkle_tree.changelog.last_index(), 2); - assert_eq!(merkle_tree.roots.last_index(), 0); - - // The latter updates should keep incrementing the counters. - for i in 0..3 { - let new_leaf: [u8; 32] = Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - - let changelog_index = merkle_tree.changelog_index(); - let old_leaf = reference_tree.leaf(i); - let mut proof = reference_tree.get_proof_of_leaf(i, false).unwrap(); - - merkle_tree - .update(changelog_index, &old_leaf, &new_leaf, i, &mut proof) - .unwrap(); - reference_tree.update(&new_leaf, i).unwrap(); - } - - assert_eq!(merkle_tree.changelog.last_index(), 5); - assert_eq!(merkle_tree.roots.last_index(), 3); -} - -/// Checks whether `append_batch` is compatible with equivalent multiple -/// appends. -fn compat_batch() -where - H: Hasher, -{ - const CHANGELOG: usize = 64; - const ROOTS: usize = 256; - - let mut rng = thread_rng(); - - let batch_limit = cmp::min(1 << HEIGHT, CHANGELOG); - for batch_size in 1..batch_limit { - let mut concurrent_mt_1 = - ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY).unwrap(); - concurrent_mt_1.init().unwrap(); - - // Tree to which are going to append single leaves. - let mut concurrent_mt_2 = - ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY).unwrap(); - concurrent_mt_2.init().unwrap(); - - // Reference tree for checking the correctness of proofs. - let mut reference_mt = light_merkle_tree_reference::MerkleTree::::new(HEIGHT, CANOPY); - - let leaves: Vec<[u8; 32]> = (0..batch_size) - .map(|_| { - Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap() - }) - .collect(); - let leaves: Vec<&[u8; 32]> = leaves.iter().collect(); - - // Append leaves to all Merkle tree implementations. - - // Batch append. - concurrent_mt_1.append_batch(leaves.as_slice()).unwrap(); - - // Singular appends. 
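
Whether leaves go in through `append_batch` or one `append` at a time, each appended leaf only touches the per-level `filled_subtrees`, the next index and the root, so the two paths can only differ in which intermediate roots they record, never in the final root. A generic sketch of that per-leaf "filled subtrees" update (the textbook incremental-append algorithm, shown as an illustration rather than the crate's exact code), cross-checked against a naive full recomputation:

```rust
// Assumed dependency: sha2 = "0.10" (stand-in hasher).
use sha2::{Digest, Sha256};

fn hash_pair(left: &[u8; 32], right: &[u8; 32]) -> [u8; 32] {
    let mut hasher = Sha256::new();
    hasher.update(left);
    hasher.update(right);
    let mut out = [0u8; 32];
    out.copy_from_slice(&hasher.finalize());
    out
}

struct Tree {
    height: usize,
    zero: Vec<[u8; 32]>,   // zero[k] = root of an empty subtree of height k
    filled: Vec<[u8; 32]>, // last left-hand node seen at each level
    next_index: usize,
    root: [u8; 32],
}

impl Tree {
    fn new(height: usize) -> Self {
        let mut zero = vec![[0u8; 32]; height + 1];
        for k in 0..height {
            zero[k + 1] = hash_pair(&zero[k], &zero[k]);
        }
        let filled = zero[..height].to_vec();
        let root = zero[height];
        Self { height, zero, filled, next_index: 0, root }
    }

    /// Only `filled`, `next_index` and the root change per append.
    fn append(&mut self, leaf: [u8; 32]) {
        let mut node = leaf;
        let mut index = self.next_index;
        for level in 0..self.height {
            if index & 1 == 0 {
                // Left child: remember it, the right sibling is still empty.
                self.filled[level] = node;
                node = hash_pair(&node, &self.zero[level]);
            } else {
                // Right child: the left sibling was remembered earlier.
                node = hash_pair(&self.filled[level], &node);
            }
            index >>= 1;
        }
        self.root = node;
        self.next_index += 1;
    }
}

/// Naive recomputation over the zero-padded leaf layer, for cross-checking.
fn naive_root(leaves: &[[u8; 32]], height: usize) -> [u8; 32] {
    let mut level: Vec<[u8; 32]> = leaves.to_vec();
    level.resize(1 << height, [0u8; 32]);
    for _ in 0..height {
        let next: Vec<[u8; 32]> = level.chunks(2).map(|p| hash_pair(&p[0], &p[1])).collect();
        level = next;
    }
    level[0]
}

fn main() {
    let leaves: Vec<[u8; 32]> = (1u8..=4).map(|i| [i; 32]).collect();
    let mut tree = Tree::new(4);
    for leaf in &leaves {
        tree.append(*leaf);
    }
    assert_eq!(tree.root, naive_root(&leaves, 4));
}
```
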
- for leaf in leaves.iter() { - concurrent_mt_2.append(leaf).unwrap(); - } - - // Singular appends to reference MT. - for leaf in leaves.iter() { - reference_mt.append(leaf).unwrap(); - } - - // Check whether roots are the same. - // Skip roots which are an output of singular, non-terminal - // appends - we don't compute them in batch appends and instead, - // emit a "zero root" (just to appease the clients assuming that - // root index is equal to sequence number). - assert_eq!( - concurrent_mt_1 - .roots - .iter() - .step_by(batch_size) - .collect::>() - .as_slice(), - concurrent_mt_2 - .roots - .iter() - .step_by(batch_size) - .collect::>() - .as_slice() - ); - assert_eq!(concurrent_mt_1.root(), reference_mt.root()); - assert_eq!(concurrent_mt_2.root(), reference_mt.root()); - } -} - -fn batch_greater_than_changelog() -where - H: Hasher, -{ - const CHANGELOG: usize = 64; - const ROOTS: usize = 256; - - let mut rng = thread_rng(); - - let mut concurrent_mt = - ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY).unwrap(); - concurrent_mt.init().unwrap(); - - for batch_size in (CHANGELOG + 1)..(1 << HEIGHT) { - let leaves: Vec<[u8; 32]> = (0..batch_size) - .map(|_| { - Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap() - }) - .collect(); - let leaves: Vec<&[u8; 32]> = leaves.iter().collect(); - - assert!(matches!( - concurrent_mt.append_batch(leaves.as_slice()), - Err(ConcurrentMerkleTreeError::BatchGreaterThanChangelog(_, _)), - )); - } -} - -fn compat_canopy() -where - H: Hasher, -{ - const CHANGELOG: usize = 64; - const ROOTS: usize = 256; - - let mut rng = thread_rng(); - - for canopy_depth in 1..(HEIGHT + 1) { - let batch_limit = cmp::min(1 << HEIGHT, CHANGELOG); - for batch_size in 1..batch_limit { - let mut concurrent_mt_with_canopy = - ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, canopy_depth) - .unwrap(); - concurrent_mt_with_canopy.init().unwrap(); - - let mut concurrent_mt_without_canopy = - ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, 0).unwrap(); - concurrent_mt_without_canopy.init().unwrap(); - - let mut reference_mt_with_canopy = - light_merkle_tree_reference::MerkleTree::::new(HEIGHT, canopy_depth); - let mut reference_mt_without_canopy = - light_merkle_tree_reference::MerkleTree::::new(HEIGHT, 0); - - for batch_i in 0..((1 << HEIGHT) / batch_size) { - let leaves: Vec<[u8; 32]> = (0..batch_size) - .map(|_| { - Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap() - }) - .collect(); - let leaves: Vec<&[u8; 32]> = leaves.iter().collect(); - - concurrent_mt_with_canopy - .append_batch(leaves.as_slice()) - .unwrap(); - concurrent_mt_without_canopy - .append_batch(leaves.as_slice()) - .unwrap(); - - for leaf in leaves { - reference_mt_with_canopy.append(leaf).unwrap(); - reference_mt_without_canopy.append(leaf).unwrap(); - } - - for leaf_i in 0..batch_size { - let leaf_index = (batch_i * batch_size) + leaf_i; - - let mut proof_with_canopy = reference_mt_with_canopy - .get_proof_of_leaf(leaf_index, false) - .unwrap(); - let proof_without_canopy = reference_mt_without_canopy - .get_proof_of_leaf(leaf_index, true) - .unwrap(); - - assert_eq!( - proof_with_canopy[..], - proof_without_canopy[..HEIGHT - canopy_depth] - ); - - concurrent_mt_with_canopy - .update_proof_from_canopy(leaf_index, &mut proof_with_canopy) - .unwrap(); - - assert_eq!(proof_with_canopy, proof_without_canopy) - } - } - } - } -} - -#[test] -fn test_append_keccak_canopy_0() { - append::() -} - -#[test] -fn 
test_append_poseidon_canopy_0() { - append::() -} - -#[test] -fn test_append_sha256_canopy_0() { - append::() -} - -#[test] -fn test_append_with_proof_keccak_4_16_16_0_16() { - append_with_proof::() -} - -#[test] -fn test_append_with_proof_poseidon_4_16_16_0_16() { - append_with_proof::() -} - -#[test] -fn test_append_with_proof_sha256_4_16_16_0_16() { - append_with_proof::() -} - -#[test] -fn test_append_with_proof_keccak_26_1400_2800_0_200() { - append_with_proof::() -} - -#[test] -fn test_append_with_proof_poseidon_26_1400_2800_0_200() { - append_with_proof::() -} - -#[test] -fn test_append_with_proof_sha256_26_1400_2800_0_200() { - append_with_proof::() -} - -#[test] -fn test_append_with_proof_keccak_26_1400_2800_10_200() { - append_with_proof::() -} - -#[test] -fn test_append_with_proof_poseidon_26_1400_2800_10_200() { - append_with_proof::() -} - -#[test] -fn test_append_with_proof_sha256_26_1400_2800_10_200() { - append_with_proof::() -} - -#[test] -fn test_update_keccak_height_4_changelog_1_roots_256_canopy_0() { - update::() -} - -#[test] -fn test_update_keccak_height_4_changelog_1_roots_256_canopy_1() { - update::() -} - -#[test] -fn test_update_keccak_height_4_changelog_1_roots_256_canopy_2() { - update::() -} - -#[test] -fn test_update_keccak_height_4_changelog_32_roots_256_canopy_0() { - update::() -} - -#[test] -fn test_update_keccak_height_4_changelog_32_roots_256_canopy_1() { - update::() -} - -#[test] -fn test_update_keccak_height_4_changelog_32_roots_256_canopy_2() { - update::() -} - -#[test] -fn test_update_poseidon_height_4_changelog_1_roots_256_canopy_0() { - update::() -} - -#[test] -fn test_update_poseidon_height_4_changelog_1_roots_256_canopy_1() { - update::() -} - -#[test] -fn test_update_poseidon_height_4_changelog_1_roots_256_canopy_2() { - update::() -} - -#[test] -fn test_update_poseidon_height_4_changelog_32_roots_256_canopy_0() { - update::() -} - -#[test] -fn test_update_poseidon_height_4_changelog_32_roots_256_canopy_1() { - update::() -} - -#[test] -fn test_update_poseidon_height_4_changelog_32_roots_256_canopy_2() { - update::() -} - -#[test] -fn test_update_sha256_height_4_changelog_32_roots_256_canopy_0() { - update::() -} - -#[test] -fn test_update_sha256_height_4_changelog_32_roots_256_canopy_1() { - update::() -} - -#[test] -fn test_update_sha256_height_4_changelog_32_roots_256_canopy_2() { - update::() -} - -#[test] -fn test_overfill_tree_keccak() { - overfill_tree::() -} - -#[test] -fn test_overfill_tree_poseidon() { - overfill_tree::() -} - -#[test] -fn test_overfill_tree_sha256() { - overfill_tree::() -} - -#[test] -fn test_overfill_changelog_keccak() { - overfill_changelog_and_roots::() -} - -#[test] -fn test_compat_batch_keccak_8_canopy_0() { - const HEIGHT: usize = 8; - const CANOPY: usize = 0; - compat_batch::() -} - -#[test] -fn test_compat_batch_poseidon_3_canopy_0() { - const HEIGHT: usize = 3; - const CANOPY: usize = 0; - compat_batch::() -} - -#[test] -fn test_compat_batch_poseidon_6_canopy_0() { - const HEIGHT: usize = 6; - const CANOPY: usize = 0; - compat_batch::() -} - -#[test] -fn test_compat_batch_sha256_8_canopy_0() { - const HEIGHT: usize = 8; - const CANOPY: usize = 0; - compat_batch::() -} - -#[cfg(feature = "heavy-tests")] -#[test] -fn test_compat_batch_keccak_16() { - const HEIGHT: usize = 16; - const CANOPY: usize = 0; - compat_batch::() -} - -#[cfg(feature = "heavy-tests")] -#[test] -fn test_compat_batch_poseidon_16() { - const HEIGHT: usize = 16; - const CANOPY: usize = 0; - compat_batch::() -} - -#[cfg(feature = 
"heavy-tests")] -#[test] -fn test_compat_batch_sha256_16() { - const HEIGHT: usize = 16; - const CANOPY: usize = 0; - compat_batch::() -} - -#[test] -fn test_batch_greater_than_changelog_keccak_8_canopy_0() { - const HEIGHT: usize = 8; - const CANOPY: usize = 0; - batch_greater_than_changelog::() -} - -#[test] -fn test_batch_greater_than_changelog_poseidon_8_canopy_0() { - const HEIGHT: usize = 8; - const CANOPY: usize = 0; - batch_greater_than_changelog::() -} - -#[test] -fn test_batch_greater_than_changelog_sha256_8_canopy_0() { - const HEIGHT: usize = 8; - const CANOPY: usize = 0; - batch_greater_than_changelog::() -} - -#[test] -fn test_batch_greater_than_changelog_keccak_8_canopy_4() { - const HEIGHT: usize = 8; - const CANOPY: usize = 4; - batch_greater_than_changelog::() -} - -#[test] -fn test_batch_greater_than_changelog_poseidon_6_canopy_3() { - const HEIGHT: usize = 6; - const CANOPY: usize = 3; - batch_greater_than_changelog::() -} - -#[test] -fn test_batch_greater_than_changelog_sha256_8_canopy_4() { - const HEIGHT: usize = 8; - const CANOPY: usize = 4; - batch_greater_than_changelog::() -} - -#[test] -fn test_compat_canopy_keccak_8() { - const HEIGHT: usize = 8; - compat_canopy::() -} - -#[test] -fn test_compat_canopy_poseidon_6() { - const HEIGHT: usize = 6; - compat_canopy::() -} - -#[cfg(feature = "heavy-tests")] -#[test] -fn test_compat_canopy_poseidon_26() { - const HEIGHT: usize = 26; - compat_canopy::() -} - -#[test] -fn test_compat_canopy_sha256_8() { - const HEIGHT: usize = 8; - compat_canopy::() -} - -/// Compares the internal fields of concurrent Merkle tree implementations, to -/// ensure their consistency. -fn compare_trees( - concurrent_mt: &ConcurrentMerkleTree, - spl_concurrent_mt: &spl_concurrent_merkle_tree::concurrent_merkle_tree::ConcurrentMerkleTree< - HEIGHT, - MAX_ROOTS, - >, -) where - H: Hasher, -{ - for i in 0..concurrent_mt.changelog.len() { - let changelog_entry = concurrent_mt.changelog[i].clone(); - let spl_changelog_entry = spl_concurrent_mt.change_logs[i]; - for j in 0..HEIGHT { - let changelog_node = changelog_entry.path[j].unwrap(); - let spl_changelog_node = spl_changelog_entry.path[j]; - assert_eq!(changelog_node, spl_changelog_node); - } - assert_eq!(changelog_entry.index, spl_changelog_entry.index as u64); - } - assert_eq!( - concurrent_mt.changelog.last_index(), - spl_concurrent_mt.active_index as usize - ); - assert_eq!(concurrent_mt.root(), spl_concurrent_mt.get_root()); - for i in 0..concurrent_mt.roots.len() { - assert_eq!( - concurrent_mt.roots[i], - spl_concurrent_mt.change_logs[i].root - ); - } - assert_eq!( - concurrent_mt.roots.last_index(), - spl_concurrent_mt.active_index as usize - ); - assert_eq!( - concurrent_mt.next_index(), - spl_concurrent_mt.rightmost_proof.index as usize - ); - assert_eq!( - concurrent_mt.rightmost_leaf(), - spl_concurrent_mt.rightmost_proof.leaf - ); -} - -/// Checks whether our `append` and `update` implementations are compatible -/// with `append` and `set_leaf` from `spl-concurrent-merkle-tree` crate. -#[tokio::test(flavor = "multi_thread")] -async fn test_spl_compat() { - const HEIGHT: usize = 4; - const CHANGELOG: usize = 64; - const ROOTS: usize = 256; - const CANOPY: usize = 0; - - let mut rng = thread_rng(); - - // Our implementation of concurrent Merkle tree. - let mut concurrent_mt = - ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY).unwrap(); - concurrent_mt.init().unwrap(); - - // Solana Labs implementation of concurrent Merkle tree. 
- let mut spl_concurrent_mt = spl_concurrent_merkle_tree::concurrent_merkle_tree::ConcurrentMerkleTree::::new(); - spl_concurrent_mt.initialize().unwrap(); - - // Reference implemenetation of Merkle tree which Solana Labs uses for - // testing (and therefore, we as well). We use it mostly to get the Merkle - // proofs. - let mut reference_tree = light_merkle_tree_reference::MerkleTree::::new(HEIGHT, CANOPY); - - for i in 0..(1 << HEIGHT) { - let leaf: [u8; 32] = Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - - concurrent_mt.append(&leaf).unwrap(); - spl_concurrent_mt.append(leaf).unwrap(); - reference_tree.append(&leaf).unwrap(); - - compare_trees(&concurrent_mt, &spl_concurrent_mt); - - // For every appended leaf with index greater than 0, update the leaf 0. - // This is done in indexed Merkle trees[0] and it's a great test case - // for rightmost proof updates. - // - // [0] https://docs.aztec.network/concepts/advanced/data_structures/indexed_merkle_tree - if i > 0 { - let new_leaf: [u8; 32] = Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - - let root = concurrent_mt.root(); - let changelog_index = concurrent_mt.changelog_index(); - let old_leaf = reference_tree.leaf(0); - let mut proof = reference_tree.get_proof_of_leaf(0, false).unwrap(); - - concurrent_mt - .update(changelog_index, &old_leaf, &new_leaf, 0, &mut proof) - .unwrap(); - spl_concurrent_mt - .set_leaf(root, old_leaf, new_leaf, proof.as_slice(), 0_u32) - .unwrap(); - reference_tree.update(&new_leaf, 0).unwrap(); - - compare_trees(&concurrent_mt, &spl_concurrent_mt); - } - } - - for i in 0..(1 << HEIGHT) { - let new_leaf: [u8; 32] = Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - - let root = concurrent_mt.root(); - let changelog_index = concurrent_mt.changelog_index(); - let old_leaf = reference_tree.leaf(i); - let mut proof = reference_tree.get_proof_of_leaf(i, false).unwrap(); - - concurrent_mt - .update(changelog_index, &old_leaf, &new_leaf, i, &mut proof) - .unwrap(); - spl_concurrent_mt - .set_leaf(root, old_leaf, new_leaf, proof.as_slice(), i as u32) - .unwrap(); - reference_tree.update(&new_leaf, i).unwrap(); - - compare_trees(&concurrent_mt, &spl_concurrent_mt); - } -} - -fn from_bytes< - H, - const HEIGHT: usize, - const CHANGELOG: usize, - const ROOTS: usize, - const CANOPY: usize, ->() -where - H: Hasher, -{ - let mut bytes = - vec![ - 0u8; - ConcurrentMerkleTree::::size_in_account(HEIGHT, CHANGELOG, ROOTS, CANOPY) - ]; - - let mut rng = thread_rng(); - let mut reference_tree_1 = light_merkle_tree_reference::MerkleTree::::new(HEIGHT, CANOPY); - - // Vector of changelog indices after each operation. - let mut leaf_indices = CyclicBoundedVec::with_capacity(CHANGELOG); - // Vector of roots after each operation. - let mut roots = CyclicBoundedVec::with_capacity(CHANGELOG); - // Vector of merkle paths we get from the reference tree after each operation. - let mut merkle_paths = CyclicBoundedVec::with_capacity(CHANGELOG); - // Changelog is always initialized with a changelog path consisting of zero - // bytes. For consistency, we need to assert the 1st zero byte as the first - // expected leaf in the changelog. 
- let merkle_path = reference_tree_1.get_path_of_leaf(0, true).unwrap(); - leaf_indices.push(0); - merkle_paths.push(merkle_path); - - { - let mut merkle_tree = - ConcurrentMerkleTreeZeroCopyMut::::from_bytes_zero_copy_init( - bytes.as_mut_slice(), - HEIGHT, - CANOPY, - CHANGELOG, - ROOTS, - ) - .unwrap(); - merkle_tree.init().unwrap(); - roots.push(merkle_tree.root()); - } - - let mut reference_tree_2 = - ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY).unwrap(); - reference_tree_2.init().unwrap(); - - // Try to make the tree full. After each append, update a random leaf. - // Reload the tree from bytes after each action. - for _ in 0..(1 << HEIGHT) { - // Reload the tree. - let mut merkle_tree = - ConcurrentMerkleTreeZeroCopyMut::::from_bytes_zero_copy_mut( - bytes.as_mut_slice(), - ) - .unwrap(); - - // Append leaf. - let leaf: [u8; 32] = Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - let leaf_index = merkle_tree.next_index(); - merkle_tree.append(&leaf).unwrap(); - reference_tree_1.append(&leaf).unwrap(); - reference_tree_2.append(&leaf).unwrap(); - - leaf_indices.push(leaf_index); - roots.push(merkle_tree.root()); - let merkle_path = reference_tree_1.get_path_of_leaf(leaf_index, true).unwrap(); - merkle_paths.push(merkle_path); - - assert_eq!( - merkle_tree.filled_subtrees.iter().collect::>(), - reference_tree_2.filled_subtrees.iter().collect::>() - ); - assert_eq!( - merkle_tree.changelog.iter().collect::>(), - reference_tree_2.changelog.iter().collect::>() - ); - assert_eq!( - merkle_tree.roots.iter().collect::>(), - reference_tree_2.roots.iter().collect::>() - ); - assert_eq!( - merkle_tree.canopy.iter().collect::>(), - reference_tree_2.canopy.iter().collect::>() - ); - assert_eq!(merkle_tree.root(), reference_tree_1.root()); - - let changelog_entries = merkle_tree - .changelog_entries(merkle_tree.changelog.first_index()) - .unwrap() - .collect::>(); - assert_eq!(changelog_entries.len(), merkle_paths.len() - 1); - - for ((leaf_index, merkle_path), changelog_entry) in leaf_indices - .iter() - .skip(1) - .zip(merkle_paths.iter().skip(1)) - .zip(changelog_entries) - { - assert_eq!(changelog_entry.index, *leaf_index as u64); - for i in 0..HEIGHT { - let changelog_node = changelog_entry.path[i].unwrap(); - let path_node = merkle_path[i]; - assert_eq!(changelog_node, path_node); - } - } - - for (root_1, root_2) in merkle_tree.roots.iter().zip(roots.iter()) { - assert_eq!(root_1, root_2); - } - - // Update random leaf. 
- let leaf_index = rng.gen_range(0..reference_tree_1.leaves().len()); - let old_leaf = reference_tree_1.leaf(leaf_index); - let new_leaf: [u8; 32] = Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - let mut proof = reference_tree_1 - .get_proof_of_leaf(leaf_index, false) - .unwrap(); - let changelog_index = merkle_tree.changelog_index(); - merkle_tree - .update( - changelog_index, - &old_leaf, - &new_leaf, - leaf_index, - &mut proof, - ) - .unwrap(); - reference_tree_1.update(&new_leaf, leaf_index).unwrap(); - reference_tree_2 - .update( - changelog_index, - &old_leaf, - &new_leaf, - leaf_index, - &mut proof, - ) - .unwrap(); - - assert_eq!(merkle_tree.root(), reference_tree_1.root()); - - leaf_indices.push(leaf_index); - roots.push(merkle_tree.root()); - let merkle_path = reference_tree_1.get_path_of_leaf(leaf_index, true).unwrap(); - merkle_paths.push(merkle_path); - - let changelog_entries = merkle_tree - .changelog_entries(merkle_tree.changelog.first_index()) - .unwrap() - .collect::>(); - assert_eq!(changelog_entries.len(), merkle_paths.len() - 1); - - for ((leaf_index, merkle_path), changelog_entry) in leaf_indices - .iter() - .skip(1) - .zip(merkle_paths.iter().skip(1)) - .zip(changelog_entries) - { - assert_eq!(changelog_entry.index, *leaf_index as u64); - for i in 0..HEIGHT { - let changelog_node = changelog_entry.path[i].unwrap(); - let path_node = merkle_path[i]; - assert_eq!(changelog_node, path_node); - } - } - - for (root_1, root_2) in merkle_tree.roots.iter().zip(roots.iter()) { - assert_eq!(root_1, root_2); - } - } - - // Keep updating random leaves in loop. - for _ in 0..1000 { - // Reload the tree. - let mut merkle_tree = - ConcurrentMerkleTreeZeroCopyMut::::from_bytes_zero_copy_mut( - bytes.as_mut_slice(), - ) - .unwrap(); - - // Update random leaf. 
- let leaf_index = rng.gen_range(0..reference_tree_1.leaves().len()); - let old_leaf = reference_tree_1.leaf(leaf_index); - let new_leaf: [u8; 32] = Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - let mut proof = reference_tree_1 - .get_proof_of_leaf(leaf_index, false) - .unwrap(); - let changelog_index = merkle_tree.changelog_index(); - merkle_tree - .update( - changelog_index, - &old_leaf, - &new_leaf, - leaf_index, - &mut proof, - ) - .unwrap(); - reference_tree_1.update(&new_leaf, leaf_index).unwrap(); - reference_tree_2 - .update( - changelog_index, - &old_leaf, - &new_leaf, - leaf_index, - &mut proof, - ) - .unwrap(); - - assert_eq!(merkle_tree.root(), reference_tree_1.root()); - - leaf_indices.push(leaf_index); - roots.push(merkle_tree.root()); - let merkle_path = reference_tree_1.get_path_of_leaf(leaf_index, true).unwrap(); - merkle_paths.push(merkle_path); - - let changelog_entries = merkle_tree - .changelog_entries(merkle_tree.changelog.first_index()) - .unwrap() - .collect::>(); - assert_eq!(changelog_entries.len(), merkle_paths.len() - 1); - - for ((leaf_index, merkle_path), changelog_entry) in leaf_indices - .iter() - .skip(1) - .zip(merkle_paths.iter().skip(1)) - .zip(changelog_entries) - { - assert_eq!(changelog_entry.index, *leaf_index as u64); - for i in 0..HEIGHT { - let changelog_node = changelog_entry.path[i].unwrap(); - let path_node = merkle_path[i]; - assert_eq!(changelog_node, path_node); - } - } - - for (root_1, root_2) in merkle_tree.roots.iter().zip(roots.iter()) { - assert_eq!(root_1, root_2); - } - } -} - -#[test] -fn test_from_bytes_keccak_8_256_256() { - const HEIGHT: usize = 8; - const CHANGELOG: usize = 256; - const ROOTS: usize = 256; - const CANOPY: usize = 0; - from_bytes::() -} - -#[test] -fn test_from_bytes_poseidon_8_256_256() { - const HEIGHT: usize = 8; - const CHANGELOG: usize = 256; - const ROOTS: usize = 256; - const CANOPY: usize = 0; - from_bytes::() -} - -#[test] -fn test_from_bytes_sha256_8_256_256_0() { - const HEIGHT: usize = 8; - const CHANGELOG: usize = 256; - const ROOTS: usize = 256; - const CANOPY: usize = 0; - from_bytes::() -} - -/// Tests the buffer size checks. Buffer size checks should fail any time that -/// a provided byte slice is smaller than the expected size indicated by the -/// tree metadata (height, changelog size, roots size etc.). -/// -/// In case of `from_bytes_zero_copy_init`, the metadata are provided with an -/// intention of initializing them. The provided parameters influence the -/// size checks. -/// -/// In case of `from_bytes_zero_copy_mut`, the metadata are read from the -/// buffer. Therefore, we end up with two phases of checks: -/// -/// 1. Check of the non-dynamic fields, including the metadata structs. -/// Based on size of all non-dynamic fields of `ConcurrentMerkleTree`. -/// 2. If the check was successful, metadata are being read from the buffer. -/// 3. After reading the metadata, we check the buffer size again, now to the -/// full extent, before actually using it. -fn buffer_error< - H, - const HEIGHT: usize, - const CHANGELOG: usize, - const ROOTS: usize, - const CANOPY: usize, ->() -where - H: Hasher, -{ - let valid_size = - ConcurrentMerkleTree::::size_in_account(HEIGHT, CHANGELOG, ROOTS, CANOPY); - - // Check that `from_bytes_zero_copy_init` checks the bounds. 
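
The two-phase check described above generalizes to any account layout in which capacities live in a fixed-size header followed by dynamic data: first make sure the header itself fits, then read the capacities from it, then re-check the buffer against the full size they imply. A small illustration with a hypothetical 16-byte header of two little-endian `usize` capacities (64-bit target assumed; this is not the crate's actual layout):

```rust
enum BufferError {
    /// (expected, got), analogous to the BufferSize error asserted below.
    Size(usize, usize),
}

/// Hypothetical layout: 8-byte changelog capacity, 8-byte roots capacity,
/// then `capacity` 32-byte entries for each vector.
const HEADER_SIZE: usize = 16;

fn expected_size(changelog_capacity: usize, roots_capacity: usize) -> usize {
    HEADER_SIZE + 32 * changelog_capacity + 32 * roots_capacity
}

fn read_usize_at(bytes: &[u8], offset: usize) -> usize {
    usize::from_le_bytes(bytes[offset..offset + 8].try_into().unwrap())
}

fn check_buffer(bytes: &[u8]) -> Result<(usize, usize), BufferError> {
    // Phase 1: the non-dynamic fields must fit before we dare read them.
    if bytes.len() < HEADER_SIZE {
        return Err(BufferError::Size(HEADER_SIZE, bytes.len()));
    }
    // Phase 2: read the metadata from the buffer.
    let changelog_capacity = read_usize_at(bytes, 0);
    let roots_capacity = read_usize_at(bytes, 8);
    // Phase 3: re-check the buffer against the full expected size.
    let expected = expected_size(changelog_capacity, roots_capacity);
    if bytes.len() < expected {
        return Err(BufferError::Size(expected, bytes.len()));
    }
    Ok((changelog_capacity, roots_capacity))
}

fn main() {
    let mut bytes = vec![0u8; expected_size(4, 8)];
    bytes[0..8].copy_from_slice(&4usize.to_le_bytes());
    bytes[8..16].copy_from_slice(&8usize.to_le_bytes());

    assert!(check_buffer(&bytes).is_ok());
    // Any truncation is rejected, in one phase or the other.
    for len in 0..bytes.len() {
        assert!(check_buffer(&bytes[..len]).is_err());
    }
}
```
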
- for invalid_size in 1..valid_size { - let mut bytes = vec![0u8; invalid_size]; - let res = ConcurrentMerkleTreeZeroCopyMut::::from_bytes_zero_copy_init( - &mut bytes, HEIGHT, CANOPY, CHANGELOG, ROOTS, - ); - assert!(matches!( - res, - Err(ConcurrentMerkleTreeError::BufferSize(_, _)) - )); - } - - // Initialize the tree correctly. - let mut bytes = vec![0u8; valid_size]; - ConcurrentMerkleTreeZeroCopyMut::::from_bytes_zero_copy_init( - &mut bytes, HEIGHT, CANOPY, CHANGELOG, ROOTS, - ) - .unwrap(); - - // Check that `from_bytes_zero_copy` mut checks the bounds based on the - // metadata in already existing Merkle tree. - for invalid_size in 1..valid_size { - let bytes = &mut bytes[..invalid_size]; - let res = ConcurrentMerkleTreeZeroCopyMut::::from_bytes_zero_copy_mut(bytes); - assert!(matches!( - res, - Err(ConcurrentMerkleTreeError::BufferSize(_, _)) - )); - } -} - -#[test] -fn test_buffer_error_keccak_8_256_256() { - const HEIGHT: usize = 8; - const CHANGELOG: usize = 256; - const ROOTS: usize = 256; - const CANOPY: usize = 0; - buffer_error::() -} - -#[test] -fn test_buffer_error_poseidon_8_256_256() { - const HEIGHT: usize = 8; - const CHANGELOG: usize = 256; - const ROOTS: usize = 256; - const CANOPY: usize = 0; - buffer_error::() -} - -#[test] -fn test_buffer_error_sha256_8_256_256_0() { - const HEIGHT: usize = 8; - const CHANGELOG: usize = 256; - const ROOTS: usize = 256; - const CANOPY: usize = 0; - buffer_error::() -} - -fn height_zero() -where - H: Hasher, -{ - const HEIGHT: usize = 0; - const CHANGELOG: usize = 256; - const ROOTS: usize = 256; - const CANOPY: usize = 0; - - let res = ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY); - assert!(matches!(res, Err(ConcurrentMerkleTreeError::HeightZero))); -} - -#[test] -fn test_height_zero_keccak() { - height_zero::() -} - -#[test] -fn test_height_zero_poseidon() { - height_zero::() -} - -#[test] -fn test_height_zero_sha256() { - height_zero::() -} - -fn changelog_zero() -where - H: Hasher, -{ - const HEIGHT: usize = 26; - const CHANGELOG: usize = 0; - const ROOTS: usize = 256; - const CANOPY: usize = 0; - - let res = ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY); - assert!(matches!(res, Err(ConcurrentMerkleTreeError::ChangelogZero))); -} - -#[test] -fn test_changelog_zero_keccak() { - changelog_zero::() -} - -#[test] -fn test_changelog_zero_poseidon() { - changelog_zero::() -} - -#[test] -fn test_changelog_zero_sha256() { - changelog_zero::() -} - -fn roots_zero() -where - H: Hasher, -{ - const HEIGHT: usize = 26; - const CHANGELOG: usize = 256; - const ROOTS: usize = 0; - const CANOPY: usize = 0; - - let res = ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY); - assert!(matches!(res, Err(ConcurrentMerkleTreeError::RootsZero))); -} - -#[test] -fn test_roots_zero_keccak() { - roots_zero::() -} - -#[test] -fn test_roots_zero_poseidon() { - roots_zero::() -} - -#[test] -fn test_roots_zero_sha256() { - roots_zero::() -} - -fn update_with_invalid_proof( - merkle_tree: &mut ConcurrentMerkleTree, - proof_len: usize, -) where - H: Hasher, -{ - // It doesn't matter what values do we use. The proof length check - // should happend before checking its correctness. 
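
Both checks can be seen in a plain verification routine: the number of siblings is validated against `height - canopy` before anything is hashed, and only then is the leaf hashed up through the siblings and compared with the stored root. A self-contained sketch for the canopy-0 case, with hypothetical error names mirroring the ones asserted in these tests and `sha2` standing in for the hasher:

```rust
// Assumed dependency: sha2 = "0.10" (stand-in hasher).
use sha2::{Digest, Sha256};

fn hash_pair(left: &[u8; 32], right: &[u8; 32]) -> [u8; 32] {
    let mut hasher = Sha256::new();
    hasher.update(left);
    hasher.update(right);
    let mut out = [0u8; 32];
    out.copy_from_slice(&hasher.finalize());
    out
}

enum ProofError {
    InvalidProofLength(usize, usize), // expected, got
    InvalidProof([u8; 32], [u8; 32]), // stored root, recomputed root
}

/// Validate the proof length first (here `height`, i.e. height - canopy with
/// canopy = 0), then recompute the root from the leaf and its siblings.
fn validate_proof(
    root: &[u8; 32],
    leaf: &[u8; 32],
    leaf_index: usize,
    proof: &[[u8; 32]],
    height: usize,
) -> Result<(), ProofError> {
    if proof.len() != height {
        return Err(ProofError::InvalidProofLength(height, proof.len()));
    }
    let mut node = *leaf;
    for (level, sibling) in proof.iter().enumerate() {
        node = if (leaf_index >> level) & 1 == 0 {
            hash_pair(&node, sibling)
        } else {
            hash_pair(sibling, &node)
        };
    }
    if &node != root {
        return Err(ProofError::InvalidProof(*root, node));
    }
    Ok(())
}

fn main() {
    let height = 2;
    let zero = [0u8; 32];
    let (l0, l1) = ([1u8; 32], [2u8; 32]);
    let root = hash_pair(&hash_pair(&l0, &l1), &hash_pair(&zero, &zero));

    // Correct proof for leaf 0: [L1, H(Z, Z)].
    let proof = [l1, hash_pair(&zero, &zero)];
    assert!(validate_proof(&root, &l0, 0, &proof, height).is_ok());

    // A wrong-sized proof fails before any hashing; random siblings fail after.
    assert!(matches!(
        validate_proof(&root, &l0, 0, &proof[..1], height),
        Err(ProofError::InvalidProofLength(2, 1))
    ));
    assert!(matches!(
        validate_proof(&root, &l0, 0, &[[7u8; 32]; 2], height),
        Err(ProofError::InvalidProof(_, _))
    ));
}
```
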
- let mut proof = BoundedVec::from_slice(vec![[5u8; 32]; proof_len].as_slice()); - - let res = merkle_tree.update( - merkle_tree.changelog_index(), - &H::zero_bytes()[0], - &[4u8; 32], - 0, - &mut proof, - ); - assert!(matches!( - res, - Err(ConcurrentMerkleTreeError::InvalidProofLength(_, _)) - )) -} - -fn invalid_proof_len() -where - H: Hasher, -{ - const CHANGELOG: usize = 256; - const ROOTS: usize = 256; - - let mut merkle_tree = - ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY).unwrap(); - merkle_tree.init().unwrap(); - - // Proof sizes lower than `height - canopy`. - for proof_len in 0..(HEIGHT - CANOPY) { - update_with_invalid_proof(&mut merkle_tree, proof_len); - } - // Proof sizes greater than `height - canopy`. - for proof_len in (HEIGHT - CANOPY + 1)..256 { - update_with_invalid_proof(&mut merkle_tree, proof_len); - } -} - -#[test] -fn test_invalid_proof_len_keccak_height_26_canopy_0() { - invalid_proof_len::() -} - -#[test] -fn test_invalid_proof_len_keccak_height_26_canopy_10() { - invalid_proof_len::() -} - -#[test] -fn test_invalid_proof_len_poseidon_height_26_canopy_0() { - invalid_proof_len::() -} - -#[test] -fn test_invalid_proof_len_poseidon_height_26_canopy_10() { - invalid_proof_len::() -} - -#[test] -fn test_invalid_proof_len_sha256_height_26_canopy_0() { - invalid_proof_len::() -} - -#[test] -fn test_invalid_proof_len_sha256_height_26_canopy_10() { - invalid_proof_len::() -} - -fn invalid_proof() -where - H: Hasher, -{ - const CHANGELOG: usize = 256; - const ROOTS: usize = 256; - - let mut merkle_tree = - ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY).unwrap(); - merkle_tree.init().unwrap(); - - let old_leaf = [5u8; 32]; - merkle_tree.append(&old_leaf).unwrap(); - - let mut rng = thread_rng(); - - let mut invalid_proof = BoundedVec::with_capacity(HEIGHT); - for _ in 0..(HEIGHT - CANOPY) { - let node: [u8; 32] = Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - invalid_proof.push(node).unwrap(); - } - - let res = merkle_tree.update( - merkle_tree.changelog_index(), - &old_leaf, - &[6u8; 32], - 0, - &mut invalid_proof, - ); - assert!(matches!( - res, - Err(ConcurrentMerkleTreeError::InvalidProof(_, _)) - )); -} - -#[test] -fn test_invalid_proof_keccak_height_26_canopy_0() { - invalid_proof::() -} - -#[test] -fn test_invalid_proof_keccak_height_26_canopy_10() { - invalid_proof::() -} - -#[test] -fn test_invalid_proof_poseidon_height_26_canopy_0() { - invalid_proof::() -} - -#[test] -fn test_invalid_proof_poseidon_height_26_canopy_10() { - invalid_proof::() -} - -#[test] -fn test_invalid_proof_sha256_height_26_canopy_0() { - invalid_proof::() -} - -#[test] -fn test_invalid_proof_sha256_height_26_canopy_10() { - invalid_proof::() -} - -fn update_empty() -where - H: Hasher, -{ - const HEIGHT: usize = 26; - const CHANGELOG: usize = 256; - const ROOTS: usize = 256; - const CANOPY: usize = 0; - - let mut merkle_tree = - ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY).unwrap(); - merkle_tree.init().unwrap(); - - // Try updating all empty leaves in the empty tree. 
- let mut proof = BoundedVec::from_slice(&H::zero_bytes()[..HEIGHT]); - for leaf_index in 0..(1 << HEIGHT) { - let old_leaf = H::zero_bytes()[0]; - let new_leaf = [5u8; 32]; - - let res = merkle_tree.update( - merkle_tree.changelog_index(), - &old_leaf, - &new_leaf, - leaf_index, - &mut proof, - ); - assert!(matches!( - res, - Err(ConcurrentMerkleTreeError::CannotUpdateEmpty) - )); - } -} - -#[test] -fn test_update_empty_keccak() { - update_empty::() -} - -#[test] -fn test_update_empty_poseidon() { - update_empty::() -} - -#[test] -fn test_update_empty_sha256() { - update_empty::() -} - -fn append_empty_batch() -where - H: Hasher, -{ - const HEIGHT: usize = 26; - const CHANGELOG: usize = 256; - const ROOTS: usize = 256; - const CANOPY: usize = 0; - - let mut merkle_tree = - ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY).unwrap(); - merkle_tree.init().unwrap(); - - let res = merkle_tree.append_batch(&[]); - assert!(matches!(res, Err(ConcurrentMerkleTreeError::EmptyLeaves))); -} - -#[test] -fn test_append_empty_batch_keccak() { - append_empty_batch::() -} - -#[test] -fn test_append_empty_batch_poseidon() { - append_empty_batch::() -} - -#[test] -fn test_append_empty_batch_sha256() { - append_empty_batch::() -} - -/// Reproducible only with Poseidon. Keccak and SHA256 don't return errors, as -/// they don't operate on a prime field. -#[test] -fn hasher_error() { - const HEIGHT: usize = 26; - const CHANGELOG: usize = 256; - const ROOTS: usize = 256; - const CANOPY: usize = 0; - - let mut merkle_tree = - ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY).unwrap(); - merkle_tree.init().unwrap(); - - // Append a leaf which exceed the modulus. - let res = merkle_tree.append(&[255_u8; 32]); - assert!(matches!(res, Err(ConcurrentMerkleTreeError::Hasher(_)))); -} - -#[test] -pub fn test_100_nullify_mt() { - for iterations in 1..100 { - println!("iteration: {:?}", iterations); - let mut crank_merkle_tree = - light_merkle_tree_reference::MerkleTree::::new(26, 10); - let mut onchain_merkle_tree = - ConcurrentMerkleTree::::new(26, 10, 10, 10).unwrap(); - onchain_merkle_tree.init().unwrap(); - assert_eq!(onchain_merkle_tree.root(), crank_merkle_tree.root()); - - let mut queue = HashSet::new(6857, 2400).unwrap(); - let mut queue_indices = Vec::new(); - for i in 1..1 + iterations { - let mut leaf = [0; 32]; - leaf[31] = i as u8; - // onchain this is equivalent to append state (compressed pda program) - onchain_merkle_tree.append(&leaf).unwrap(); - crank_merkle_tree.append(&leaf).unwrap(); - // onchain the equivalent is nullify state (compressed pda program) - let leaf_bn = BigUint::from_be_bytes(&leaf); - queue.insert(&leaf_bn, 1).unwrap(); - let (_, index) = queue.find_element(&leaf_bn, None).unwrap().unwrap(); - queue_indices.push(index); - } - assert_eq!(onchain_merkle_tree.root(), crank_merkle_tree.root()); - assert_eq!( - onchain_merkle_tree.canopy, - crank_merkle_tree.get_canopy().unwrap() - ); - - let mut rng = rand::thread_rng(); - - // Pick random queue indices to nullify. - let queue_indices = queue_indices - .choose_multiple(&mut rng, cmp::min(9, iterations)) - .cloned() - .collect::>(); - - let change_log_index = onchain_merkle_tree.changelog_index(); - - let mut nullified_leaf_indices = Vec::with_capacity(queue_indices.len()); - - // Nullify the leaves we picked. 
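
The `hasher_error` case above comes down to a range check: Poseidon over BN254 only accepts 32-byte big-endian inputs strictly below the scalar-field modulus, so `[255u8; 32]` is rejected, while Keccak and SHA-256 hash arbitrary bytes. A stand-alone sketch of that bound check, assuming the commonly published BN254 `Fr` modulus for the constant below:

```rust
/// Assumed constant: the BN254 scalar field modulus, big-endian
/// (0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001).
const BN254_FR_MODULUS_BE: [u8; 32] = [
    0x30, 0x64, 0x4e, 0x72, 0xe1, 0x31, 0xa0, 0x29,
    0xb8, 0x50, 0x45, 0xb6, 0x81, 0x81, 0x58, 0x5d,
    0x28, 0x33, 0xe8, 0x48, 0x79, 0xb9, 0x70, 0x91,
    0x43, 0xe1, 0xf5, 0x93, 0xf0, 0x00, 0x00, 0x01,
];

/// A 32-byte big-endian value is a valid field element iff it is strictly
/// smaller than the modulus; lexicographic byte order matches numeric order
/// for equal-length big-endian integers.
fn is_valid_poseidon_input(leaf: &[u8; 32]) -> bool {
    leaf < &BN254_FR_MODULUS_BE
}

fn main() {
    assert!(is_valid_poseidon_input(&[0u8; 32]));
    // `[255; 32]` exceeds the modulus, which is why appending it makes the
    // Poseidon-backed tree fail while the Keccak/SHA-256 trees accept it.
    assert!(!is_valid_poseidon_input(&[255u8; 32]));
}
```
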
- for queue_index in queue_indices { - let leaf_cell = queue.get_unmarked_bucket(queue_index).unwrap().unwrap(); - let leaf_index = crank_merkle_tree - .get_leaf_index(&leaf_cell.value_bytes()) - .unwrap(); - - let mut proof = crank_merkle_tree - .get_proof_of_leaf(leaf_index, false) - .unwrap(); - onchain_merkle_tree - .update( - change_log_index, - &leaf_cell.value_bytes(), - &[0u8; 32], - leaf_index, - &mut proof, - ) - .unwrap(); - - nullified_leaf_indices.push(leaf_index); - } - for leaf_index in nullified_leaf_indices { - crank_merkle_tree.update(&[0; 32], leaf_index).unwrap(); - } - assert_eq!(onchain_merkle_tree.root(), crank_merkle_tree.root()); - assert_eq!( - onchain_merkle_tree.canopy, - crank_merkle_tree.get_canopy().unwrap() - ); - } -} - -const LEAVES_WITH_NULLIFICATIONS: [([u8; 32], Option); 25] = [ - ( - [ - 9, 207, 75, 159, 247, 170, 46, 154, 178, 197, 60, 83, 191, 240, 137, 41, 36, 54, 242, - 50, 43, 48, 56, 220, 154, 217, 138, 19, 152, 123, 86, 8, - ], - None, - ), - ( - [ - 40, 10, 138, 159, 12, 188, 226, 84, 188, 92, 250, 11, 94, 240, 77, 158, 69, 219, 175, - 48, 248, 181, 216, 200, 54, 38, 12, 224, 155, 40, 23, 32, - ], - None, - ), - ( - [ - 11, 36, 94, 177, 195, 5, 4, 35, 75, 253, 31, 235, 68, 201, 79, 197, 199, 23, 214, 86, - 196, 2, 41, 249, 246, 138, 184, 248, 245, 66, 184, 244, - ], - None, - ), - ( - [ - 29, 3, 221, 195, 235, 46, 139, 171, 137, 7, 36, 118, 178, 198, 52, 20, 10, 131, 164, 5, - 116, 187, 118, 186, 34, 193, 46, 6, 5, 144, 82, 4, - ], - None, - ), - ( - [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, - ], - Some(0), - ), - ( - [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, - ], - Some(1), - ), - ( - [ - 6, 146, 149, 76, 49, 159, 84, 164, 203, 159, 181, 165, 21, 204, 111, 149, 87, 255, 46, - 82, 162, 181, 99, 178, 247, 27, 166, 174, 212, 39, 163, 106, - ], - None, - ), - ( - [ - 19, 135, 28, 172, 63, 129, 175, 101, 201, 97, 135, 147, 18, 78, 152, 243, 15, 154, 120, - 153, 92, 46, 245, 82, 67, 32, 224, 141, 89, 149, 162, 228, - ], - None, - ), - ( - [ - 4, 93, 251, 40, 246, 136, 132, 20, 175, 98, 3, 186, 159, 251, 128, 159, 219, 172, 67, - 20, 69, 19, 66, 193, 232, 30, 121, 19, 193, 177, 143, 6, - ], - None, - ), - ( - [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, - ], - Some(3), - ), - ( - [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, - ], - Some(4), - ), - ( - [ - 34, 229, 118, 4, 68, 219, 118, 228, 117, 70, 150, 93, 208, 215, 51, 243, 123, 48, 39, - 228, 206, 194, 200, 232, 35, 133, 166, 222, 118, 217, 122, 228, - ], - None, - ), - ( - [ - 24, 61, 159, 11, 70, 12, 177, 252, 244, 238, 130, 73, 202, 69, 102, 83, 33, 103, 82, - 66, 83, 191, 149, 187, 141, 111, 253, 110, 49, 5, 47, 151, - ], - None, - ), - ( - [ - 29, 239, 118, 17, 75, 98, 148, 167, 142, 190, 223, 175, 98, 255, 153, 111, 127, 169, - 62, 234, 90, 89, 90, 70, 218, 161, 233, 150, 89, 173, 19, 1, - ], - None, - ), - ( - [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, - ], - Some(6), - ), - ( - [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, - ], - Some(5), - ), - ( - [ - 45, 31, 195, 30, 201, 235, 73, 88, 57, 130, 35, 53, 202, 191, 20, 156, 125, 123, 37, - 49, 154, 194, 124, 157, 198, 236, 233, 25, 195, 174, 157, 31, - ], - None, - ), - ( - [ - 5, 59, 32, 123, 40, 
100, 50, 132, 2, 194, 104, 95, 21, 23, 52, 56, 125, 198, 102, 210, - 24, 44, 99, 255, 185, 255, 151, 249, 67, 167, 189, 85, - ], - None, - ), - ( - [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, - ], - Some(9), - ), - ( - [ - 36, 131, 231, 53, 12, 14, 62, 144, 170, 248, 90, 226, 125, 178, 99, 87, 101, 226, 179, - 43, 110, 130, 233, 194, 112, 209, 74, 219, 154, 48, 41, 148, - ], - None, - ), - ( - [ - 12, 110, 79, 229, 117, 215, 178, 45, 227, 65, 183, 14, 91, 45, 170, 232, 126, 71, 37, - 211, 160, 77, 148, 223, 50, 144, 134, 232, 83, 159, 131, 62, - ], - None, - ), - ( - [ - 28, 57, 110, 171, 41, 144, 47, 162, 132, 221, 102, 100, 30, 69, 249, 176, 87, 134, 133, - 207, 250, 166, 139, 16, 73, 39, 11, 139, 158, 182, 43, 68, - ], - None, - ), - ( - [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, - ], - Some(11), - ), - ( - [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, - ], - Some(10), - ), - ( - [ - 25, 88, 170, 121, 91, 234, 185, 213, 24, 92, 209, 146, 109, 134, 118, 242, 74, 218, 69, - 28, 87, 154, 207, 86, 218, 48, 182, 206, 8, 9, 35, 240, - ], - None, - ), -]; - -/// Test correctness of subtree updates during updates. -/// The test data is a sequence of leaves with some nullifications -/// and the result of a randomized tests which has triggered subtree inconsistencies. -/// 1. Test subtree consistency with test data -/// 2. Test subtree consistency of updating the right most leaf -#[test] -fn test_subtree_updates() { - const HEIGHT: usize = 26; - let mut ref_mt = - light_merkle_tree_reference::MerkleTree::::new(HEIGHT, 0); - let mut con_mt = - light_concurrent_merkle_tree::ConcurrentMerkleTree26::::new( - HEIGHT, 1400, 2400, 0, - ) - .unwrap(); - let mut spl_concurrent_mt = - spl_concurrent_merkle_tree::concurrent_merkle_tree::ConcurrentMerkleTree::::new(); - spl_concurrent_mt.initialize().unwrap(); - con_mt.init().unwrap(); - assert_eq!(ref_mt.root(), con_mt.root()); - for leaf in LEAVES_WITH_NULLIFICATIONS.iter() { - match leaf.1 { - Some(index) => { - let change_log_index = con_mt.changelog_index(); - let mut proof = ref_mt.get_proof_of_leaf(index, false).unwrap(); - let old_leaf = ref_mt.leaf(index); - let current_root = con_mt.root(); - spl_concurrent_mt - .set_leaf( - current_root, - old_leaf, - [0u8; 32], - proof.to_array::().unwrap().as_slice(), - index.try_into().unwrap(), - ) - .unwrap(); - con_mt - .update(change_log_index, &old_leaf, &[0u8; 32], index, &mut proof) - .unwrap(); - ref_mt.update(&[0u8; 32], index).unwrap(); - } - None => { - con_mt.append(&leaf.0).unwrap(); - ref_mt.append(&leaf.0).unwrap(); - spl_concurrent_mt.append(leaf.0).unwrap(); - } - } - assert_eq!(spl_concurrent_mt.get_root(), ref_mt.root()); - assert_eq!(spl_concurrent_mt.get_root(), con_mt.root()); - assert_eq!(ref_mt.root(), con_mt.root()); - } - let index = con_mt.next_index() - 1; - // test rightmost leaf edge case - let change_log_index = con_mt.changelog_index(); - let mut proof = ref_mt.get_proof_of_leaf(index, false).unwrap(); - let old_leaf = ref_mt.leaf(index); - let current_root = con_mt.root(); - spl_concurrent_mt - .set_leaf( - current_root, - old_leaf, - [0u8; 32], - proof.to_array::().unwrap().as_slice(), - index.try_into().unwrap(), - ) - .unwrap(); - con_mt - .update(change_log_index, &old_leaf, &[0u8; 32], index, &mut proof) - .unwrap(); - ref_mt.update(&[0u8; 32], index).unwrap(); - - 
assert_eq!(spl_concurrent_mt.get_root(), ref_mt.root()); - assert_eq!(spl_concurrent_mt.get_root(), con_mt.root()); - assert_eq!(ref_mt.root(), con_mt.root()); - - let leaf = [3u8; 32]; - con_mt.append(&leaf).unwrap(); - ref_mt.append(&leaf).unwrap(); - spl_concurrent_mt.append(leaf).unwrap(); - - assert_eq!(spl_concurrent_mt.get_root(), ref_mt.root()); - assert_eq!(spl_concurrent_mt.get_root(), con_mt.root()); - assert_eq!(ref_mt.root(), con_mt.root()); -} - -/// Tests an update of a leaf which was modified by another updates. -fn update_already_modified_leaf< - H, - // Number of conflicting updates of the same leaf. - const CONFLICTS: usize, - // Number of appends of random leaves before submitting the conflicting - // updates. - const RANDOM_APPENDS_BEFORE_CONFLICTS: usize, - // Number of appends of random leaves after every single conflicting - // update. - const RANDOM_APPENDS_AFTER_EACH_CONFLICT: usize, ->() -where - H: Hasher, -{ - const HEIGHT: usize = 26; - const MAX_CHANGELOG: usize = 8; - const MAX_ROOTS: usize = 8; - const CANOPY: usize = 0; - - let mut merkle_tree = - ConcurrentMerkleTree::::new(HEIGHT, MAX_CHANGELOG, MAX_ROOTS, CANOPY).unwrap(); - merkle_tree.init().unwrap(); - let mut reference_tree = light_merkle_tree_reference::MerkleTree::::new(HEIGHT, CANOPY); - - let mut rng = thread_rng(); - - // Create tree with a single leaf. - let first_leaf: [u8; 32] = Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - merkle_tree.append(&first_leaf).unwrap(); - reference_tree.append(&first_leaf).unwrap(); - - // Save a proof of the first append. - let outdated_changelog_index = merkle_tree.changelog_index(); - let mut outdated_proof = reference_tree.get_proof_of_leaf(0, false).unwrap().clone(); - - let mut old_leaf = first_leaf; - for _ in 0..CONFLICTS { - // Update leaf. Always use an up-to-date proof. - let mut up_to_date_proof = reference_tree.get_proof_of_leaf(0, false).unwrap(); - let new_leaf = Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - merkle_tree - .update( - merkle_tree.changelog_index(), - &old_leaf, - &new_leaf, - 0, - &mut up_to_date_proof, - ) - .unwrap(); - reference_tree.update(&new_leaf, 0).unwrap(); - - old_leaf = new_leaf; - - assert_eq!(merkle_tree.root(), reference_tree.root()); - } - - // Update leaf. This time, try using an outdated proof. 
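// Sketch of the conflict rule the `update_already_modified_leaf` tests exercise
// (hypothetical, simplified types; roughly what `ChangelogEntry::update_proof`
// is expected to do): a stale proof is fast-forwarded by replaying every newer
// changelog entry. An entry that touched a different leaf only swaps in the
// proof node at the single level where the two paths are siblings; an entry
// that touched the same leaf index is a conflict, surfaced as `CannotUpdateLeaf`.
struct SketchChangelogEntry<const HEIGHT: usize> {
    index: usize,
    path: [Option<[u8; 32]>; HEIGHT],
}

impl<const HEIGHT: usize> SketchChangelogEntry<HEIGHT> {
    fn update_proof(
        &self,
        leaf_index: usize,
        proof: &mut [[u8; 32]; HEIGHT],
    ) -> Result<(), &'static str> {
        if self.index == leaf_index {
            // The same leaf was already modified by a newer operation.
            return Err("cannot update leaf");
        }
        for level in 0..HEIGHT {
            // Ancestor indices of both leaves at this level.
            let ours = leaf_index >> level;
            let theirs = self.index >> level;
            if ours ^ 1 == theirs {
                // The changelog path crosses our proof here: adopt the new node.
                if let Some(node) = self.path[level] {
                    proof[level] = node;
                }
            }
        }
        Ok(())
    }
}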
-    let new_leaf = Fr::rand(&mut rng)
-        .into_bigint()
-        .to_bytes_be()
-        .try_into()
-        .unwrap();
-    let res = merkle_tree.update(
-        outdated_changelog_index,
-        &first_leaf,
-        &new_leaf,
-        0,
-        &mut outdated_proof,
-    );
-    assert!(matches!(
-        res,
-        Err(ConcurrentMerkleTreeError::CannotUpdateLeaf)
-    ));
-}
-
-#[test]
-fn test_update_already_modified_leaf_keccak_1_0_0() {
-    update_already_modified_leaf::<Keccak, 1, 0, 0>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_poseidon_1_0_0() {
-    update_already_modified_leaf::<Poseidon, 1, 0, 0>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_sha256_1_0_0() {
-    update_already_modified_leaf::<Sha256, 1, 0, 0>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_keccak_1_1_1() {
-    update_already_modified_leaf::<Keccak, 1, 1, 1>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_poseidon_1_1_1() {
-    update_already_modified_leaf::<Poseidon, 1, 1, 1>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_sha256_1_1_1() {
-    update_already_modified_leaf::<Sha256, 1, 1, 1>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_keccak_1_2_2() {
-    update_already_modified_leaf::<Keccak, 1, 2, 2>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_poseidon_1_2_2() {
-    update_already_modified_leaf::<Poseidon, 1, 2, 2>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_sha256_1_2_2() {
-    update_already_modified_leaf::<Sha256, 1, 2, 2>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_keccak_2_0_0() {
-    update_already_modified_leaf::<Keccak, 2, 0, 0>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_poseidon_2_0_0() {
-    update_already_modified_leaf::<Poseidon, 2, 0, 0>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_sha256_2_0_0() {
-    update_already_modified_leaf::<Sha256, 2, 0, 0>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_keccak_2_1_1() {
-    update_already_modified_leaf::<Keccak, 2, 1, 1>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_poseidon_2_1_1() {
-    update_already_modified_leaf::<Poseidon, 2, 1, 1>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_sha256_2_1_1() {
-    update_already_modified_leaf::<Sha256, 2, 1, 1>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_keccak_2_2_2() {
-    update_already_modified_leaf::<Keccak, 2, 2, 2>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_poseidon_2_2_2() {
-    update_already_modified_leaf::<Poseidon, 2, 2, 2>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_sha256_2_2_2() {
-    update_already_modified_leaf::<Sha256, 2, 2, 2>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_keccak_4_0_0() {
-    update_already_modified_leaf::<Keccak, 4, 0, 0>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_poseidon_4_0_0() {
-    update_already_modified_leaf::<Poseidon, 4, 0, 0>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_sha256_4_0_0() {
-    update_already_modified_leaf::<Sha256, 4, 0, 0>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_keccak_4_1_1() {
-    update_already_modified_leaf::<Keccak, 4, 1, 1>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_poseidon_4_1_1() {
-    update_already_modified_leaf::<Poseidon, 4, 1, 1>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_sha256_4_1_1() {
-    update_already_modified_leaf::<Sha256, 4, 1, 1>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_keccak_4_4_4() {
-    update_already_modified_leaf::<Keccak, 4, 4, 4>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_poseidon_4_4_4() {
-    update_already_modified_leaf::<Poseidon, 4, 4, 4>()
-}
-
-#[test]
-fn test_update_already_modified_leaf_sha256_4_4_4() {
-    update_already_modified_leaf::<Sha256, 4, 4, 4>()
-}
-
-/// Checks whether the [`changelog_entries`](ConcurrentMerkleTree::changelog_entries)
-/// method returns an iterator with expected entries.
-///
-/// We expect the `changelog_entries` method to return an iterator with entries
-/// newer than the requested index.
-/// -/// # Examples -/// -/// (In the tree) `current_index`: 1 -/// (Requested) `changelog_index`: 1 -/// Expected iterator: `[]` (empty) -/// -/// (In the tree) `current_index`: 3 -/// (Requested) `changelog_index`: 1 -/// Expected iterator: `[2, 3]` (1 is skipped) -/// -/// Changelog capacity: 12 -/// (In the tree) `current_index`: 9 -/// (Requested) `changelog_index`: 3 (lowed than `current_index`, because the -/// changelog is full and started overwriting values from the head) -/// Expected iterator: `[10, 11, 12, 13, 14, 15]` (9 is skipped) -fn changelog_entries() -where - H: Hasher, -{ - const HEIGHT: usize = 26; - const CHANGELOG: usize = 12; - const ROOTS: usize = 16; - const CANOPY: usize = 0; - - let mut merkle_tree = - ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY).unwrap(); - merkle_tree.init().unwrap(); - - merkle_tree - .append(&[ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 1, - ]) - .unwrap(); - - let changelog_entries = merkle_tree - .changelog_entries(1) - .unwrap() - .collect::>(); - assert!(changelog_entries.is_empty()); - - // Try getting changelog entries out of bounds. - for start in merkle_tree.changelog.len()..1000 { - let changelog_entries = merkle_tree.changelog_entries(start); - assert!(matches!( - changelog_entries, - Err(ConcurrentMerkleTreeError::BoundedVec( - BoundedVecError::IterFromOutOfBounds - )) - )); - } - - merkle_tree - .append(&[ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 2, - ]) - .unwrap(); - merkle_tree - .append(&[ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 3, - ]) - .unwrap(); - - let changelog_leaves = merkle_tree - .changelog_entries(1) - .unwrap() - .map(|changelog_entry| changelog_entry.path[0]) - .collect::>(); - assert_eq!( - changelog_leaves.as_slice(), - &[ - Some([ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 2 - ]), - Some([ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 3 - ]) - ] - ); - - // Try getting changelog entries out of bounds. 
- for start in merkle_tree.changelog.len()..1000 { - let changelog_entries = merkle_tree.changelog_entries(start); - assert!(matches!( - changelog_entries, - Err(ConcurrentMerkleTreeError::BoundedVec( - BoundedVecError::IterFromOutOfBounds - )) - )); - } - - for i in 4_u8..16_u8 { - merkle_tree - .append(&[ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, i, - ]) - .unwrap(); - } - - let changelog_leaves = merkle_tree - .changelog_entries(9) - .unwrap() - .map(|changelog_entry| changelog_entry.path[0]) - .collect::>(); - assert_eq!( - changelog_leaves.as_slice(), - &[ - Some([ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 10 - ]), - Some([ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 11 - ]), - Some([ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 12 - ]), - Some([ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 13 - ]), - Some([ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 14 - ]), - Some([ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 15 - ]) - ] - ); - - // Try getting changelog entries out of bounds. - for start in merkle_tree.changelog.len()..1000 { - let changelog_entries = merkle_tree.changelog_entries(start); - assert!(matches!( - changelog_entries, - Err(ConcurrentMerkleTreeError::BoundedVec( - BoundedVecError::IterFromOutOfBounds - )) - )); - } -} - -#[test] -fn changelog_entries_keccak() { - changelog_entries::() -} - -#[test] -fn changelog_entries_poseidon() { - changelog_entries::() -} - -#[test] -fn changelog_entries_sha256() { - changelog_entries::() -} - -/// Checks whether the [`changelog_entries`](ConcurrentMerkleTree::changelog_entries) -/// method returns an iterator with expected entries. -/// -/// It tests random insertions and updates and checks the consistency of leaves -/// (`path[0]`) in changelogs. -fn changelog_entries_random< - H, - const HEIGHT: usize, - const CHANGELOG: usize, - const ROOTS: usize, - const CANOPY: usize, ->() -where - H: Hasher, -{ - let mut merkle_tree = - ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY).unwrap(); - merkle_tree.init().unwrap(); - - let mut reference_tree = light_merkle_tree_reference::MerkleTree::::new(HEIGHT, CANOPY); - - let mut rng = thread_rng(); - - let changelog_entries = merkle_tree - .changelog_entries(0) - .unwrap() - .collect::>(); - assert!(changelog_entries.is_empty()); - - // Requesting changelog entries starting from the current `changelog_index()` - // should always return an empty iterator. - let changelog_entries = merkle_tree - .changelog_entries(merkle_tree.changelog_index()) - .unwrap() - .collect::>(); - assert!(changelog_entries.is_empty()); - - // Vector of changelog indices after each operation. - let mut leaf_indices = CyclicBoundedVec::with_capacity(CHANGELOG); - // Vector of roots after each operation. - let mut roots = CyclicBoundedVec::with_capacity(CHANGELOG); - // Vector of merkle paths we get from the reference tree after each operation. - let mut merkle_paths = CyclicBoundedVec::with_capacity(CHANGELOG); - // Changelog is always initialized with a changelog path consisting of zero - // bytes. For consistency, we need to assert the 1st zero byte as the first - // expected leaf in the changelog. 
- let merkle_path = reference_tree.get_path_of_leaf(0, true).unwrap(); - leaf_indices.push(0); - merkle_paths.push(merkle_path); - roots.push(merkle_tree.root()); - - for _ in 0..1000 { - // Append random leaf. - let leaf: [u8; 32] = Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - let leaf_index = merkle_tree.next_index(); - merkle_tree.append(&leaf).unwrap(); - reference_tree.append(&leaf).unwrap(); - - leaf_indices.push(leaf_index); - roots.push(merkle_tree.root()); - let merkle_path = reference_tree.get_path_of_leaf(leaf_index, true).unwrap(); - merkle_paths.push(merkle_path); - - let changelog_entries = merkle_tree - .changelog_entries(merkle_tree.changelog.first_index()) - .unwrap() - .collect::>(); - assert_eq!(changelog_entries.len(), merkle_paths.len() - 1); - - for ((leaf_index, merkle_path), changelog_entry) in leaf_indices - .iter() - .skip(1) - .zip(merkle_paths.iter().skip(1)) - .zip(changelog_entries) - { - assert_eq!(changelog_entry.index, *leaf_index as u64); - for i in 0..HEIGHT { - let changelog_node = changelog_entry.path[i].unwrap(); - let path_node = merkle_path[i]; - assert_eq!(changelog_node, path_node); - } - } - - // Requesting changelog entries starting from the current `changelog_index()` - // should always return an empty iterator. - let changelog_entries = merkle_tree - .changelog_entries(merkle_tree.changelog_index()) - .unwrap() - .collect::>(); - assert!(changelog_entries.is_empty()); - - // Update random leaf. - let leaf_index = rng.gen_range(0..reference_tree.leaves().len()); - let old_leaf = reference_tree.leaf(leaf_index); - let new_leaf: [u8; 32] = Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - let mut proof = reference_tree.get_proof_of_leaf(leaf_index, false).unwrap(); - merkle_tree - .update( - merkle_tree.changelog_index(), - &old_leaf, - &new_leaf, - leaf_index, - &mut proof, - ) - .unwrap(); - reference_tree.update(&new_leaf, leaf_index).unwrap(); - - leaf_indices.push(leaf_index); - roots.push(merkle_tree.root()); - let merkle_path = reference_tree.get_path_of_leaf(leaf_index, true).unwrap(); - merkle_paths.push(merkle_path); - - let changelog_entries = merkle_tree - .changelog_entries(merkle_tree.changelog.first_index()) - .unwrap() - .collect::>(); - assert_eq!(changelog_entries.len(), merkle_paths.len() - 1); - - for ((leaf_index, merkle_path), changelog_entry) in leaf_indices - .iter() - .skip(1) - .zip(merkle_paths.iter().skip(1)) - .zip(changelog_entries) - { - assert_eq!(changelog_entry.index, *leaf_index as u64); - for i in 0..HEIGHT { - let changelog_node = changelog_entry.path[i].unwrap(); - let path_node = merkle_path[i]; - assert_eq!(changelog_node, path_node); - } - } - - // Requesting changelog entries starting from the current `changelog_index()` - // should always return an empty iterator. 
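// Illustrative sketch (hypothetical helper) of the iteration contract asserted
// above: `changelog_entries(i)` yields only entries strictly newer than `i`, so
// the entry at the requested index is always skipped and asking from the current
// `changelog_index()` yields nothing. Ignoring wrap-around of the cyclic buffer,
// the produced indices are simply `i + 1 ..= newest`.
fn entries_newer_than(requested: usize, newest: usize) -> impl Iterator<Item = usize> {
    (requested + 1)..=newest
}

#[test]
fn entries_newer_than_examples() {
    // Requesting the current index returns an empty iterator.
    assert_eq!(entries_newer_than(1, 1).count(), 0);
    // Requesting index 1 while the current index is 3 yields [2, 3]; 1 is skipped.
    assert_eq!(entries_newer_than(1, 3).collect::<Vec<_>>(), vec![2, 3]);
}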
-        let changelog_entries = merkle_tree
-            .changelog_entries(merkle_tree.changelog_index())
-            .unwrap()
-            .collect::<Vec<_>>();
-        assert!(changelog_entries.is_empty());
-    }
-}
-
-#[test]
-fn test_changelog_entries_random_keccak_26_256_256_0() {
-    const HEIGHT: usize = 26;
-    const CHANGELOG: usize = 256;
-    const ROOTS: usize = 256;
-    const CANOPY: usize = 0;
-    changelog_entries_random::<Keccak, HEIGHT, CHANGELOG, ROOTS, CANOPY>()
-}
-
-#[test]
-fn test_changelog_entries_random_keccak_26_256_256_10() {
-    const HEIGHT: usize = 26;
-    const CHANGELOG: usize = 256;
-    const ROOTS: usize = 256;
-    const CANOPY: usize = 10;
-    changelog_entries_random::<Keccak, HEIGHT, CHANGELOG, ROOTS, CANOPY>()
-}
-
-#[test]
-fn test_changelog_entries_random_poseidon_26_256_256_0() {
-    const HEIGHT: usize = 26;
-    const CHANGELOG: usize = 256;
-    const ROOTS: usize = 256;
-    const CANOPY: usize = 0;
-    changelog_entries_random::<Poseidon, HEIGHT, CHANGELOG, ROOTS, CANOPY>()
-}
-
-#[test]
-fn test_changelog_entries_random_poseidon_26_256_256_10() {
-    const HEIGHT: usize = 26;
-    const CHANGELOG: usize = 256;
-    const ROOTS: usize = 256;
-    const CANOPY: usize = 10;
-    changelog_entries_random::<Poseidon, HEIGHT, CHANGELOG, ROOTS, CANOPY>()
-}
-
-#[test]
-fn test_changelog_entries_random_sha256_26_256_256_0() {
-    const HEIGHT: usize = 26;
-    const CHANGELOG: usize = 256;
-    const ROOTS: usize = 256;
-    const CANOPY: usize = 0;
-    changelog_entries_random::<Sha256, HEIGHT, CHANGELOG, ROOTS, CANOPY>()
-}
-
-#[test]
-fn test_changelog_entries_random_sha256_26_256_256_10() {
-    const HEIGHT: usize = 26;
-    const CHANGELOG: usize = 256;
-    const ROOTS: usize = 256;
-    const CANOPY: usize = 10;
-    changelog_entries_random::<Sha256, HEIGHT, CHANGELOG, ROOTS, CANOPY>()
-}
-
-/// When reading the tests above (`changelog_entries`, `changelog_entries_random`)
-/// you might still be wondering why skipping the **current** changelog element is
-/// necessary.
-///
-/// The explanation is that not skipping the current element might produce leaf
-/// conflicts. Imagine that we insert a leaf and then we try to immediately update
-/// it. Starting the iteration from that current entry would replay our own
-/// insertion against the fresh proof and wrongly report a conflict.
-///
-/// This test reproduces that case and serves as proof that skipping is the
-/// right action.
-fn changelog_iteration_without_skipping<
-    H,
-    const HEIGHT: usize,
-    const CHANGELOG: usize,
-    const ROOTS: usize,
-    const CANOPY: usize,
->()
-where
-    H: Hasher,
-{
-    /// A broken re-implementation of `ConcurrentMerkleTree::update_proof_from_changelog`
-    /// which reproduces the described issue.
- fn update_proof_from_changelog( - merkle_tree: &ConcurrentMerkleTree, - changelog_index: usize, - leaf_index: usize, - proof: &mut BoundedVec<[u8; 32]>, - ) -> Result<(), ConcurrentMerkleTreeError> - where - H: Hasher, - { - for changelog_entry in merkle_tree.changelog.iter_from(changelog_index).unwrap() { - changelog_entry.update_proof(leaf_index, proof)?; - } - - Ok(()) - } - - let mut merkle_tree = - ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY).unwrap(); - merkle_tree.init().unwrap(); - - let mut reference_tree = light_merkle_tree_reference::MerkleTree::::new(HEIGHT, CANOPY); - - let mut rng = thread_rng(); - - let leaf: [u8; 32] = Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - - merkle_tree.append(&leaf).unwrap(); - reference_tree.append(&leaf).unwrap(); - - let mut proof = reference_tree.get_proof_of_leaf(0, false).unwrap(); - - let res = - update_proof_from_changelog(&merkle_tree, merkle_tree.changelog_index(), 0, &mut proof); - assert!(matches!( - res, - Err(ConcurrentMerkleTreeError::CannotUpdateLeaf) - )); -} - -#[test] -fn test_changelog_interation_without_skipping_keccak_26_16_16_0() { - const HEIGHT: usize = 26; - const CHANGELOG: usize = 16; - const ROOTS: usize = 16; - const CANOPY: usize = 0; - changelog_iteration_without_skipping::() -} - -#[test] -fn test_changelog_interation_without_skipping_poseidon_26_16_16_0() { - const HEIGHT: usize = 26; - const CHANGELOG: usize = 16; - const ROOTS: usize = 16; - const CANOPY: usize = 0; - changelog_iteration_without_skipping::() -} - -#[test] -fn test_changelog_interation_without_skipping_sha256_26_16_16_0() { - const HEIGHT: usize = 26; - const CHANGELOG: usize = 16; - const ROOTS: usize = 16; - const CANOPY: usize = 0; - changelog_iteration_without_skipping::() -} - -/// Tests an update with an old `changelog_index` and proof, which refers to the -/// state before the changelog wrap-around (enough new operations to overwrite -/// the whole changelog). Such an update should fail, -fn update_changelog_wrap_around< - H, - const HEIGHT: usize, - const CHANGELOG: usize, - const ROOTS: usize, - const CANOPY: usize, ->() -where - H: Hasher, -{ - let mut merkle_tree = - ConcurrentMerkleTree::::new(HEIGHT, CHANGELOG, ROOTS, CANOPY).unwrap(); - merkle_tree.init().unwrap(); - - let mut reference_tree = light_merkle_tree_reference::MerkleTree::::new(HEIGHT, CANOPY); - - let mut rng = thread_rng(); - - // The leaf which we will want to update with an expired changelog. - let leaf: [u8; 32] = Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - let (changelog_index, _) = merkle_tree.append(&leaf).unwrap(); - reference_tree.append(&leaf).unwrap(); - let mut proof = reference_tree.get_proof_of_leaf(0, false).unwrap(); - - // Perform enough appends and updates to overfill the changelog - for i in 0..CHANGELOG { - if i % 2 == 0 { - // Append random leaf. - let leaf: [u8; 32] = Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - merkle_tree.append(&leaf).unwrap(); - reference_tree.append(&leaf).unwrap(); - } else { - // Update random leaf. 
- let leaf_index = rng.gen_range(1..reference_tree.leaves().len()); - let old_leaf = reference_tree.leaf(leaf_index); - let new_leaf: [u8; 32] = Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - let mut proof = reference_tree.get_proof_of_leaf(leaf_index, false).unwrap(); - merkle_tree - .update( - merkle_tree.changelog_index(), - &old_leaf, - &new_leaf, - leaf_index, - &mut proof, - ) - .unwrap(); - reference_tree.update(&new_leaf, leaf_index).unwrap(); - } - } - - // Try to update the original `leaf` with an outdated proof and changelog - // index. Expect an error. - let new_leaf: [u8; 32] = Fr::rand(&mut rng) - .into_bigint() - .to_bytes_be() - .try_into() - .unwrap(); - - let res = merkle_tree.update(changelog_index, &leaf, &new_leaf, 0, &mut proof); - assert!(matches!( - res, - Err(ConcurrentMerkleTreeError::InvalidProof(_, _)) - )); - - // Try to update the original `leaf` with an up-to-date proof and changelog - // index. Expect a success. - let changelog_index = merkle_tree.changelog_index(); - let mut proof = reference_tree.get_proof_of_leaf(0, false).unwrap(); - merkle_tree - .update(changelog_index, &leaf, &new_leaf, 0, &mut proof) - .unwrap(); -} - -#[test] -fn test_update_changelog_wrap_around_keccak_26_256_512_0() { - const HEIGHT: usize = 26; - const CHANGELOG: usize = 256; - const ROOTS: usize = 256; - const CANOPY: usize = 0; - update_changelog_wrap_around::() -} - -#[test] -fn test_update_changelog_wrap_around_poseidon_26_256_512_0() { - const HEIGHT: usize = 26; - const CHANGELOG: usize = 256; - const ROOTS: usize = 256; - const CANOPY: usize = 0; - update_changelog_wrap_around::() -} - -#[test] -fn test_update_changelog_wrap_around_sha256_26_256_512_0() { - const HEIGHT: usize = 26; - const CHANGELOG: usize = 256; - const ROOTS: usize = 256; - const CANOPY: usize = 0; - update_changelog_wrap_around::() -} - -#[test] -fn test_append_batch() { - let mut tree = ConcurrentMerkleTree::::new(2, 2, 2, 1).unwrap(); - tree.init().unwrap(); - let leaf_0 = [0; 32]; - let leaf_1 = [1; 32]; - tree.append_batch(&[&leaf_0, &leaf_1]).unwrap(); - let change_log_0 = &tree - .changelog - .get(tree.changelog.first_index()) - .unwrap() - .path; - let change_log_1 = &tree - .changelog - .get(tree.changelog.last_index()) - .unwrap() - .path; - let path_0 = ChangelogPath([Some(leaf_0), None]); - let path_1 = ChangelogPath([ - Some(leaf_1), - Some(Sha256::hashv(&[&leaf_0, &leaf_1]).unwrap()), - ]); - - assert_eq!(change_log_1, &path_1); - assert_eq!(change_log_0, &path_0); -} - -/// Tests that updating proof with changelog entries with incomplete paths (coming -/// from batched appends) works. -#[test] -fn test_append_batch_and_update() { - let mut tree = ConcurrentMerkleTree::::new(3, 10, 10, 0).unwrap(); - tree.init().unwrap(); - - let mut reference_tree = light_merkle_tree_reference::MerkleTree::::new(3, 0); - - // Append two leaves. - let leaf_0 = [0; 32]; - let leaf_1 = [1; 32]; - tree.append_batch(&[&leaf_0, &leaf_1]).unwrap(); - reference_tree.append(&leaf_0).unwrap(); - reference_tree.append(&leaf_1).unwrap(); - - let changelog_index = tree.changelog_index(); - let mut proof_leaf_0 = reference_tree.get_proof_of_leaf(0, false).unwrap(); - let mut proof_leaf_1 = reference_tree.get_proof_of_leaf(1, false).unwrap(); - - // Append another two leaves. 
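// Sketch of what the changelog entries recorded by `append_batch` above contain
// (assumes the same `Hasher` trait used throughout this file; `parent` is a
// hypothetical helper): each appended leaf gets one entry, but only the last
// (terminal) entry of a batch carries freshly hashed parent nodes; the earlier
// entries keep `None` above their own leaf, as the assertions below show.
fn parent<H: Hasher>(left: &[u8; 32], right: &[u8; 32]) -> [u8; 32] {
    // For the batch [leaf_0, leaf_1] this is the `path[1]` node of the terminal
    // entry, i.e. Sha256::hashv(&[&leaf_0, &leaf_1]) in `test_append_batch`.
    H::hashv(&[left, right]).unwrap()
}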
- let leaf_2 = [2; 32]; - let leaf_3 = [3; 32]; - tree.append_batch(&[&leaf_2, &leaf_3]).unwrap(); - reference_tree.append(&leaf_2).unwrap(); - reference_tree.append(&leaf_3).unwrap(); - - let changelog_entry_leaf_2 = &tree.changelog[3]; - // Make sure that the non-terminal changelog entry has `None` nodes. - assert_eq!( - changelog_entry_leaf_2.path, - ChangelogPath([Some([2; 32]), None, None]) - ); - let changelog_entry_leaf_3 = &tree.changelog[4]; - // And that the terminal one has no `None` nodes. - assert_eq!( - changelog_entry_leaf_3.path, - ChangelogPath([ - Some([3; 32]), - Some([ - 39, 243, 47, 187, 250, 194, 251, 187, 206, 88, 177, 7, 82, 20, 75, 90, 116, 70, - 212, 185, 30, 75, 169, 15, 253, 238, 48, 94, 145, 89, 128, 232 - ]), - Some([ - 211, 95, 81, 105, 147, 137, 218, 126, 236, 124, 229, 235, 2, 100, 12, 109, 49, 140, - 245, 26, 227, 158, 202, 137, 11, 188, 123, 132, 236, 181, 218, 104 - ]) - ]) - ); - - // The tree (only the used fragment) looks like: - // - // _ H2 _ - // / \ - // H0 H1 - // / \ / \ - // L0 L1 L2 L3 - - // Update `leaf_0`. Expect a success. - let new_leaf_0 = [10; 32]; - tree.update(changelog_index, &leaf_0, &new_leaf_0, 0, &mut proof_leaf_0) - .unwrap(); - - // Update `leaf_1`. Expect a success. - let new_leaf_1 = [20; 32]; - tree.update(changelog_index, &leaf_1, &new_leaf_1, 1, &mut proof_leaf_1) - .unwrap(); -} - -/// Makes sure canopy works by: -/// -/// 1. Appending 3 leaves. -/// 2. Updating the first leaf. -/// 3. Updating the second leaf. -fn update_with_canopy() -where - H: Hasher, -{ - let mut tree = ConcurrentMerkleTree::::new(2, 2, 2, 1).unwrap(); - tree.init().unwrap(); - let leaf_0 = [0; 32]; - let leaf_1 = [1; 32]; - let leaf_2 = [2; 32]; - tree.append(&leaf_0).unwrap(); - tree.append(&leaf_1).unwrap(); - tree.append(&leaf_2).unwrap(); - let old_canopy = tree.canopy.as_slice()[0]; - - let new_leaf_0 = [1; 32]; - let mut leaf_0_proof = BoundedVec::with_capacity(2); - leaf_0_proof.push(leaf_1).unwrap(); - tree.update( - tree.changelog_index(), - &leaf_0, - &new_leaf_0, - 0, - &mut leaf_0_proof, - ) - .unwrap(); - let new_canopy = tree.canopy.as_slice()[0]; - - assert_ne!(old_canopy, new_canopy); - - let new_leaf_2 = [3; 32]; - let mut leaf_2_proof = BoundedVec::with_capacity(2); - leaf_2_proof.push([0; 32]).unwrap(); - tree.update( - tree.changelog_index(), - &leaf_2, - &new_leaf_2, - 2, - &mut leaf_2_proof, - ) - .unwrap(); -} - -#[test] -fn test_update_with_canopy_keccak() { - update_with_canopy::() -} - -#[test] -fn test_update_with_canopy_poseidon() { - update_with_canopy::() -} - -#[test] -fn test_update_with_canopy_sha256() { - update_with_canopy::() -} diff --git a/hash-set/Cargo.lock b/hash-set/Cargo.lock deleted file mode 100644 index e99243a..0000000 --- a/hash-set/Cargo.lock +++ /dev/null @@ -1,1875 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 4 - -[[package]] -name = "ahash" -version = "0.7.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" -dependencies = [ - "getrandom 0.2.15", - "once_cell", - "version_check", -] - -[[package]] -name = "ahash" -version = "0.8.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" -dependencies = [ - "cfg-if", - "once_cell", - "version_check", - "zerocopy", -] - -[[package]] -name = "anchor-attribute-access-control" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5f619f1d04f53621925ba8a2e633ba5a6081f2ae14758cbb67f38fd823e0a3e" -dependencies = [ - "anchor-syn", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-attribute-account" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7f2a3e1df4685f18d12a943a9f2a7456305401af21a07c9fe076ef9ecd6e400" -dependencies = [ - "anchor-syn", - "bs58 0.5.1", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-attribute-constant" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9423945cb55627f0b30903288e78baf6f62c6c8ab28fb344b6b25f1ffee3dca7" -dependencies = [ - "anchor-syn", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-attribute-error" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93ed12720033cc3c3bf3cfa293349c2275cd5ab99936e33dd4bf283aaad3e241" -dependencies = [ - "anchor-syn", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-attribute-event" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eef4dc0371eba2d8c8b54794b0b0eb786a234a559b77593d6f80825b6d2c77a2" -dependencies = [ - "anchor-syn", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-attribute-program" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b18c4f191331e078d4a6a080954d1576241c29c56638783322a18d308ab27e4f" -dependencies = [ - "anchor-syn", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-derive-accounts" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5de10d6e9620d3bcea56c56151cad83c5992f50d5960b3a9bebc4a50390ddc3c" -dependencies = [ - "anchor-syn", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-derive-serde" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4e2e5be518ec6053d90a2a7f26843dbee607583c779e6c8395951b9739bdfbe" -dependencies = [ - "anchor-syn", - "borsh-derive-internal 0.10.4", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-derive-space" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ecc31d19fa54840e74b7a979d44bcea49d70459de846088a1d71e87ba53c419" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-lang" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35da4785497388af0553586d55ebdc08054a8b1724720ef2749d313494f2b8ad" -dependencies = [ - "anchor-attribute-access-control", - "anchor-attribute-account", - "anchor-attribute-constant", - "anchor-attribute-error", - 
"anchor-attribute-event", - "anchor-attribute-program", - "anchor-derive-accounts", - "anchor-derive-serde", - "anchor-derive-space", - "arrayref", - "base64 0.13.1", - "bincode", - "borsh 0.10.4", - "bytemuck", - "getrandom 0.2.15", - "solana-program", - "thiserror", -] - -[[package]] -name = "anchor-syn" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9101b84702fed2ea57bd22992f75065da5648017135b844283a2f6d74f27825" -dependencies = [ - "anyhow", - "bs58 0.5.1", - "heck", - "proc-macro2", - "quote", - "serde", - "serde_json", - "sha2 0.10.8", - "syn 1.0.109", - "thiserror", -] - -[[package]] -name = "anyhow" -version = "1.0.94" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7" - -[[package]] -name = "ark-bn254" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a22f4561524cd949590d78d7d4c5df8f592430d221f7f3c9497bbafd8972120f" -dependencies = [ - "ark-ec", - "ark-ff", - "ark-std", -] - -[[package]] -name = "ark-ec" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "defd9a439d56ac24968cca0571f598a61bc8c55f71d50a89cda591cb750670ba" -dependencies = [ - "ark-ff", - "ark-poly", - "ark-serialize", - "ark-std", - "derivative", - "hashbrown 0.13.2", - "itertools", - "num-traits", - "zeroize", -] - -[[package]] -name = "ark-ff" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec847af850f44ad29048935519032c33da8aa03340876d351dfab5660d2966ba" -dependencies = [ - "ark-ff-asm", - "ark-ff-macros", - "ark-serialize", - "ark-std", - "derivative", - "digest 0.10.7", - "itertools", - "num-bigint", - "num-traits", - "paste", - "rustc_version", - "zeroize", -] - -[[package]] -name = "ark-ff-asm" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed4aa4fe255d0bc6d79373f7e31d2ea147bcf486cba1be5ba7ea85abdb92348" -dependencies = [ - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-ff-macros" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565" -dependencies = [ - "num-bigint", - "num-traits", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-poly" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d320bfc44ee185d899ccbadfa8bc31aab923ce1558716e1997a1e74057fe86bf" -dependencies = [ - "ark-ff", - "ark-serialize", - "ark-std", - "derivative", - "hashbrown 0.13.2", -] - -[[package]] -name = "ark-serialize" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adb7b85a02b83d2f22f89bd5cac66c9c89474240cb6207cb1efc16d098e822a5" -dependencies = [ - "ark-serialize-derive", - "ark-std", - "digest 0.10.7", - "num-bigint", -] - -[[package]] -name = "ark-serialize-derive" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae3281bc6d0fd7e549af32b52511e1302185bd688fd3359fa36423346ff682ea" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-std" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" -dependencies = [ - "num-traits", - "rand 0.8.5", -] - -[[package]] -name = 
"arrayref" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" - -[[package]] -name = "arrayvec" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" - -[[package]] -name = "autocfg" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" - -[[package]] -name = "base64" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" - -[[package]] -name = "base64" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" - -[[package]] -name = "base64" -version = "0.21.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" - -[[package]] -name = "bincode" -version = "1.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" -dependencies = [ - "serde", -] - -[[package]] -name = "bitflags" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" -dependencies = [ - "serde", -] - -[[package]] -name = "bitmaps" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" -dependencies = [ - "typenum", -] - -[[package]] -name = "blake3" -version = "1.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8ee0c1824c4dea5b5f81736aff91bae041d2c07ee1192bec91054e10e3e601e" -dependencies = [ - "arrayref", - "arrayvec", - "cc", - "cfg-if", - "constant_time_eq", - "digest 0.10.7", -] - -[[package]] -name = "block-buffer" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" -dependencies = [ - "generic-array", -] - -[[package]] -name = "block-buffer" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" -dependencies = [ - "generic-array", -] - -[[package]] -name = "borsh" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15bf3650200d8bffa99015595e10f1fbd17de07abbc25bb067da79e769939bfa" -dependencies = [ - "borsh-derive 0.9.3", - "hashbrown 0.11.2", -] - -[[package]] -name = "borsh" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "115e54d64eb62cdebad391c19efc9dce4981c690c85a33a12199d99bb9546fee" -dependencies = [ - "borsh-derive 0.10.4", - "hashbrown 0.13.2", -] - -[[package]] -name = "borsh" -version = "1.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2506947f73ad44e344215ccd6403ac2ae18cd8e046e581a441bf8d199f257f03" -dependencies = [ - "borsh-derive 1.5.3", - "cfg_aliases", -] - -[[package]] -name = "borsh-derive" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "6441c552f230375d18e3cc377677914d2ca2b0d36e52129fe15450a2dce46775" -dependencies = [ - "borsh-derive-internal 0.9.3", - "borsh-schema-derive-internal 0.9.3", - "proc-macro-crate 0.1.5", - "proc-macro2", - "syn 1.0.109", -] - -[[package]] -name = "borsh-derive" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "831213f80d9423998dd696e2c5345aba6be7a0bd8cd19e31c5243e13df1cef89" -dependencies = [ - "borsh-derive-internal 0.10.4", - "borsh-schema-derive-internal 0.10.4", - "proc-macro-crate 0.1.5", - "proc-macro2", - "syn 1.0.109", -] - -[[package]] -name = "borsh-derive" -version = "1.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2593a3b8b938bd68373196c9832f516be11fa487ef4ae745eb282e6a56a7244" -dependencies = [ - "once_cell", - "proc-macro-crate 3.2.0", - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "borsh-derive-internal" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5449c28a7b352f2d1e592a8a28bf139bc71afb0764a14f3c02500935d8c44065" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "borsh-derive-internal" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65d6ba50644c98714aa2a70d13d7df3cd75cd2b523a2b452bf010443800976b3" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "borsh-schema-derive-internal" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdbd5696d8bfa21d53d9fe39a714a18538bad11492a42d066dbbc395fb1951c0" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "borsh-schema-derive-internal" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "276691d96f063427be83e6692b86148e488ebba9f48f77788724ca027ba3b6d4" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "bs58" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "771fe0050b883fcc3ea2359b1a96bcfbc090b7116eae7c3c512c7a083fdf23d3" - -[[package]] -name = "bs58" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf88ba1141d185c399bee5288d850d63b8369520c1eafc32a0430b5b6c287bf4" -dependencies = [ - "tinyvec", -] - -[[package]] -name = "bumpalo" -version = "3.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" - -[[package]] -name = "bv" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8834bb1d8ee5dc048ee3124f2c7c1afcc6bc9aed03f11e9dfd8c69470a5db340" -dependencies = [ - "feature-probe", - "serde", -] - -[[package]] -name = "bytemuck" -version = "1.21.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef657dfab802224e671f5818e9a4935f9b1957ed18e58292690cc39e7a4092a3" -dependencies = [ - "bytemuck_derive", -] - -[[package]] -name = "bytemuck_derive" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fa76293b4f7bb636ab88fd78228235b5248b4d05cc589aed610f954af5d7c7a" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "byteorder" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" - -[[package]] -name = "cc" -version = "1.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c31a0499c1dc64f458ad13872de75c0eb7e3fdb0e67964610c914b034fc5956e" -dependencies = [ - "jobserver", - "libc", - "shlex", -] - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "cfg_aliases" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" - -[[package]] -name = "console_error_panic_hook" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" -dependencies = [ - "cfg-if", - "wasm-bindgen", -] - -[[package]] -name = "console_log" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89f72f65e8501878b8a004d5a1afb780987e2ce2b4532c562e367a72c57499f" -dependencies = [ - "log", - "web-sys", -] - -[[package]] -name = "constant_time_eq" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" - -[[package]] -name = "cpufeatures" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" -dependencies = [ - "libc", -] - -[[package]] -name = "crossbeam-deque" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" -dependencies = [ - "crossbeam-epoch", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.9.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-utils" -version = "0.8.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" - -[[package]] -name = "crunchy" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" - -[[package]] -name = "crypto-common" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" -dependencies = [ - "generic-array", - "typenum", -] - -[[package]] -name = "crypto-mac" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" -dependencies = [ - "generic-array", - "subtle", -] - -[[package]] -name = "curve25519-dalek" -version = "3.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90f9d052967f590a76e62eb387bd0bbb1b000182c3cefe5364db6b7211651bc0" -dependencies = [ - "byteorder", - "digest 0.9.0", - "rand_core 0.5.1", - "serde", - "subtle", - "zeroize", -] - -[[package]] -name = "derivative" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "digest" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" -dependencies = [ - "generic-array", -] - -[[package]] -name = "digest" -version = "0.10.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" -dependencies = [ - "block-buffer 0.10.4", - "crypto-common", - "subtle", -] - -[[package]] -name = "either" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" - -[[package]] -name = "equivalent" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" - -[[package]] -name = "feature-probe" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "835a3dc7d1ec9e75e2b5fb4ba75396837112d2060b03f7d43bc1897c7f7211da" - -[[package]] -name = "generic-array" -version = "0.14.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" -dependencies = [ - "serde", - "typenum", - "version_check", -] - -[[package]] -name = "getrandom" -version = "0.1.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" -dependencies = [ - "cfg-if", - "js-sys", - "libc", - "wasi 0.9.0+wasi-snapshot-preview1", - "wasm-bindgen", -] - -[[package]] -name = "getrandom" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" -dependencies = [ - "cfg-if", - "js-sys", - "libc", - "wasi 0.11.0+wasi-snapshot-preview1", - "wasm-bindgen", -] - -[[package]] -name = "hashbrown" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" -dependencies = [ - "ahash 0.7.8", -] - -[[package]] -name = "hashbrown" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" -dependencies = [ - "ahash 0.8.11", -] - -[[package]] -name = "hashbrown" -version = "0.15.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" - -[[package]] -name = "heck" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" -dependencies = [ - "unicode-segmentation", -] - -[[package]] -name = "hmac" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "126888268dcc288495a26bf004b38c5fdbb31682f992c84ceb046a1f0fe38840" -dependencies = [ - "crypto-mac", - "digest 0.9.0", -] - -[[package]] -name = "hmac-drbg" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17ea0a1394df5b6574da6e0c1ade9e78868c9fb0a4e5ef4428e32da4676b85b1" -dependencies = [ - "digest 0.9.0", - 
"generic-array", - "hmac", -] - -[[package]] -name = "im" -version = "15.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0acd33ff0285af998aaf9b57342af478078f53492322fafc47450e09397e0e9" -dependencies = [ - "bitmaps", - "rand_core 0.6.4", - "rand_xoshiro", - "rayon", - "serde", - "sized-chunks", - "typenum", - "version_check", -] - -[[package]] -name = "indexmap" -version = "2.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" -dependencies = [ - "equivalent", - "hashbrown 0.15.2", -] - -[[package]] -name = "itertools" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", -] - -[[package]] -name = "itoa" -version = "1.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" - -[[package]] -name = "jobserver" -version = "0.1.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" -dependencies = [ - "libc", -] - -[[package]] -name = "js-sys" -version = "0.3.76" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6717b6b5b077764fb5966237269cb3c64edddde4b14ce42647430a78ced9e7b7" -dependencies = [ - "once_cell", - "wasm-bindgen", -] - -[[package]] -name = "keccak" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" -dependencies = [ - "cpufeatures", -] - -[[package]] -name = "lazy_static" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" - -[[package]] -name = "libc" -version = "0.2.169" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" - -[[package]] -name = "libsecp256k1" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9d220bc1feda2ac231cb78c3d26f27676b8cf82c96971f7aeef3d0cf2797c73" -dependencies = [ - "arrayref", - "base64 0.12.3", - "digest 0.9.0", - "hmac-drbg", - "libsecp256k1-core", - "libsecp256k1-gen-ecmult", - "libsecp256k1-gen-genmult", - "rand 0.7.3", - "serde", - "sha2 0.9.9", - "typenum", -] - -[[package]] -name = "libsecp256k1-core" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0f6ab710cec28cef759c5f18671a27dae2a5f952cdaaee1d8e2908cb2478a80" -dependencies = [ - "crunchy", - "digest 0.9.0", - "subtle", -] - -[[package]] -name = "libsecp256k1-gen-ecmult" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccab96b584d38fac86a83f07e659f0deafd0253dc096dab5a36d53efe653c5c3" -dependencies = [ - "libsecp256k1-core", -] - -[[package]] -name = "libsecp256k1-gen-genmult" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67abfe149395e3aa1c48a2beb32b068e2334402df8181f818d3aee2b304c4f5d" -dependencies = [ - "libsecp256k1-core", -] - -[[package]] -name = "light-bounded-vec" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"47ced86d6f1b163a04d5d0be44f8bbeedb11d32f73af27812bbd144e0f1f1a42" -dependencies = [ - "bytemuck", - "memoffset", - "thiserror", -] - -[[package]] -name = "light-hash-set" -version = "1.2.0" -dependencies = [ - "ark-bn254", - "ark-ff", - "light-bounded-vec", - "light-heap", - "light-utils", - "memoffset", - "num-bigint", - "num-traits", - "rand 0.8.5", - "solana-program", - "thiserror", -] - -[[package]] -name = "light-heap" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7304b9ef6e32c540b685fb3cb13081db831b8f4ea03d1d5d54491dee19100eb5" -dependencies = [ - "anchor-lang", -] - -[[package]] -name = "light-poseidon" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c9a85a9752c549ceb7578064b4ed891179d20acd85f27318573b64d2d7ee7ee" -dependencies = [ - "ark-bn254", - "ark-ff", - "num-bigint", - "thiserror", -] - -[[package]] -name = "light-utils" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19e2b5a4959cb0456b483a20b4f3930920949137c00e76b5d0f9bf8d701a3c6a" -dependencies = [ - "anyhow", - "ark-bn254", - "ark-ff", - "light-bounded-vec", - "num-bigint", - "rand 0.8.5", - "solana-program", - "thiserror", -] - -[[package]] -name = "lock_api" -version = "0.4.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" -dependencies = [ - "autocfg", - "scopeguard", -] - -[[package]] -name = "log" -version = "0.4.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" - -[[package]] -name = "memchr" -version = "2.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" - -[[package]] -name = "memmap2" -version = "0.5.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83faa42c0a078c393f6b29d5db232d8be22776a891f8f56e5284faee4a20b327" -dependencies = [ - "libc", -] - -[[package]] -name = "memoffset" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" -dependencies = [ - "autocfg", -] - -[[package]] -name = "num-bigint" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" -dependencies = [ - "num-integer", - "num-traits", - "rand 0.8.5", -] - -[[package]] -name = "num-derive" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "num-integer" -version = "0.1.46" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" -dependencies = [ - "num-traits", -] - -[[package]] -name = "num-traits" -version = "0.2.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" -dependencies = [ - "autocfg", -] - -[[package]] -name = "once_cell" -version = "1.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" - -[[package]] -name = "opaque-debug" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" - -[[package]] -name = "parking_lot" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" -dependencies = [ - "lock_api", - "parking_lot_core", -] - -[[package]] -name = "parking_lot_core" -version = "0.9.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" -dependencies = [ - "cfg-if", - "libc", - "redox_syscall", - "smallvec", - "windows-targets", -] - -[[package]] -name = "paste" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" - -[[package]] -name = "pbkdf2" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "216eaa586a190f0a738f2f918511eecfa90f13295abec0e457cdebcceda80cbd" -dependencies = [ - "crypto-mac", -] - -[[package]] -name = "ppv-lite86" -version = "0.2.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" -dependencies = [ - "zerocopy", -] - -[[package]] -name = "proc-macro-crate" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785" -dependencies = [ - "toml", -] - -[[package]] -name = "proc-macro-crate" -version = "3.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" -dependencies = [ - "toml_edit", -] - -[[package]] -name = "proc-macro2" -version = "1.0.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "quote" -version = "1.0.37" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "rand" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" -dependencies = [ - "getrandom 0.1.16", - "libc", - "rand_chacha 0.2.2", - "rand_core 0.5.1", - "rand_hc", -] - -[[package]] -name = "rand" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" -dependencies = [ - "libc", - "rand_chacha 0.3.1", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_chacha" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" -dependencies = [ - "ppv-lite86", - "rand_core 0.5.1", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core 0.6.4", -] - 
-[[package]] -name = "rand_core" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" -dependencies = [ - "getrandom 0.1.16", -] - -[[package]] -name = "rand_core" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" -dependencies = [ - "getrandom 0.2.15", -] - -[[package]] -name = "rand_hc" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" -dependencies = [ - "rand_core 0.5.1", -] - -[[package]] -name = "rand_xoshiro" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa" -dependencies = [ - "rand_core 0.6.4", -] - -[[package]] -name = "rayon" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" -dependencies = [ - "either", - "rayon-core", -] - -[[package]] -name = "rayon-core" -version = "1.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" -dependencies = [ - "crossbeam-deque", - "crossbeam-utils", -] - -[[package]] -name = "redox_syscall" -version = "0.5.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" -dependencies = [ - "bitflags", -] - -[[package]] -name = "rustc-hash" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" - -[[package]] -name = "rustc_version" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" -dependencies = [ - "semver", -] - -[[package]] -name = "rustversion" -version = "1.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" - -[[package]] -name = "ryu" -version = "1.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" - -[[package]] -name = "scopeguard" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" - -[[package]] -name = "semver" -version = "1.0.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cb6eb87a131f756572d7fb904f6e7b68633f09cca868c5df1c4b8d1a694bbba" - -[[package]] -name = "serde" -version = "1.0.216" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b9781016e935a97e8beecf0c933758c97a5520d32930e460142b4cd80c6338e" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_bytes" -version = "0.11.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "387cc504cb06bb40a96c8e04e951fe01854cf6bc921053c954e4a606d9675c6a" -dependencies = [ - "serde", -] - -[[package]] -name = "serde_derive" -version = "1.0.216" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "serde_json" -version = "1.0.134" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d00f4175c42ee48b15416f6193a959ba3a0d67fc699a0db9ad12df9f83991c7d" -dependencies = [ - "itoa", - "memchr", - "ryu", - "serde", -] - -[[package]] -name = "sha2" -version = "0.9.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" -dependencies = [ - "block-buffer 0.9.0", - "cfg-if", - "cpufeatures", - "digest 0.9.0", - "opaque-debug", -] - -[[package]] -name = "sha2" -version = "0.10.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest 0.10.7", -] - -[[package]] -name = "sha3" -version = "0.10.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" -dependencies = [ - "digest 0.10.7", - "keccak", -] - -[[package]] -name = "shlex" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" - -[[package]] -name = "sized-chunks" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e" -dependencies = [ - "bitmaps", - "typenum", -] - -[[package]] -name = "smallvec" -version = "1.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" - -[[package]] -name = "solana-frozen-abi" -version = "1.18.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20a6ef2db80dceb124b7bf81cca3300804bf427d2711973fc3df450ed7dfb26d" -dependencies = [ - "block-buffer 0.10.4", - "bs58 0.4.0", - "bv", - "either", - "generic-array", - "im", - "lazy_static", - "log", - "memmap2", - "rustc_version", - "serde", - "serde_bytes", - "serde_derive", - "sha2 0.10.8", - "solana-frozen-abi-macro", - "subtle", - "thiserror", -] - -[[package]] -name = "solana-frozen-abi-macro" -version = "1.18.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70088de7d4067d19a7455609e2b393e6086bd847bb39c4d2bf234fc14827ef9e" -dependencies = [ - "proc-macro2", - "quote", - "rustc_version", - "syn 2.0.90", -] - -[[package]] -name = "solana-program" -version = "1.18.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb2b2c8babfae4cace1a25b6efa00418f3acd852cf55d7cecc0360d3c5050479" -dependencies = [ - "ark-bn254", - "ark-ec", - "ark-ff", - "ark-serialize", - "base64 0.21.7", - "bincode", - "bitflags", - "blake3", - "borsh 0.10.4", - "borsh 0.9.3", - "borsh 1.5.3", - "bs58 0.4.0", - "bv", - "bytemuck", - "cc", - "console_error_panic_hook", - "console_log", - "curve25519-dalek", - "getrandom 0.2.15", - "itertools", - "js-sys", - "lazy_static", - "libc", - "libsecp256k1", - "light-poseidon", - "log", - "memoffset", - "num-bigint", - "num-derive", - "num-traits", - "parking_lot", - "rand 0.8.5", - "rustc_version", - "rustversion", - "serde", - "serde_bytes", - "serde_derive", - "serde_json", - "sha2 0.10.8", - "sha3", - "solana-frozen-abi", - "solana-frozen-abi-macro", - 
"solana-sdk-macro", - "thiserror", - "tiny-bip39", - "wasm-bindgen", - "zeroize", -] - -[[package]] -name = "solana-sdk-macro" -version = "1.18.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c55c196c8050834c391a34b58e3c9fd86b15452ef1feeeafa1dbeb9d2291dfec" -dependencies = [ - "bs58 0.4.0", - "proc-macro2", - "quote", - "rustversion", - "syn 2.0.90", -] - -[[package]] -name = "subtle" -version = "2.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" - -[[package]] -name = "syn" -version = "1.0.109" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "syn" -version = "2.0.90" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "thiserror" -version = "1.0.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = "thiserror-impl" -version = "1.0.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "tiny-bip39" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffc59cb9dfc85bb312c3a78fd6aa8a8582e310b0fa885d5bb877f6dcc601839d" -dependencies = [ - "anyhow", - "hmac", - "once_cell", - "pbkdf2", - "rand 0.7.3", - "rustc-hash", - "sha2 0.9.9", - "thiserror", - "unicode-normalization", - "wasm-bindgen", - "zeroize", -] - -[[package]] -name = "tinyvec" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "022db8904dfa342efe721985167e9fcd16c29b226db4397ed752a761cfce81e8" -dependencies = [ - "tinyvec_macros", -] - -[[package]] -name = "tinyvec_macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" - -[[package]] -name = "toml" -version = "0.5.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" -dependencies = [ - "serde", -] - -[[package]] -name = "toml_datetime" -version = "0.6.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" - -[[package]] -name = "toml_edit" -version = "0.22.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" -dependencies = [ - "indexmap", - "toml_datetime", - "winnow", -] - -[[package]] -name = "typenum" -version = "1.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" - -[[package]] -name = "unicode-ident" -version = "1.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" 
- -[[package]] -name = "unicode-normalization" -version = "0.1.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" -dependencies = [ - "tinyvec", -] - -[[package]] -name = "unicode-segmentation" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" - -[[package]] -name = "version_check" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" - -[[package]] -name = "wasi" -version = "0.9.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" - -[[package]] -name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" - -[[package]] -name = "wasm-bindgen" -version = "0.2.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a474f6281d1d70c17ae7aa6a613c87fce69a127e2624002df63dcb39d6cf6396" -dependencies = [ - "cfg-if", - "once_cell", - "wasm-bindgen-macro", -] - -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f89bb38646b4f81674e8f5c3fb81b562be1fd936d84320f3264486418519c79" -dependencies = [ - "bumpalo", - "log", - "proc-macro2", - "quote", - "syn 2.0.90", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2cc6181fd9a7492eef6fef1f33961e3695e4579b9872a6f7c83aee556666d4fe" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", - "wasm-bindgen-backend", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "943aab3fdaaa029a6e0271b35ea10b72b943135afe9bffca82384098ad0e06a6" - -[[package]] -name = "web-sys" -version = "0.3.76" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04dd7223427d52553d3702c004d3b2fe07c148165faa56313cb00211e31c12bc" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "windows-targets" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" -dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_gnullvm", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" - -[[package]] -name = "windows_i686_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" - -[[package]] -name = "windows_i686_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" - -[[package]] -name = "winnow" -version = "0.6.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b" -dependencies = [ - "memchr", -] - -[[package]] -name = "zerocopy" -version = "0.7.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" -dependencies = [ - "byteorder", - "zerocopy-derive", -] - -[[package]] -name = "zerocopy-derive" -version = "0.7.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "zeroize" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4756f7db3f7b5574938c3eb1c117038b8e07f95ee6718c0efad4ac21508f1efd" -dependencies = [ - "zeroize_derive", -] - -[[package]] -name = "zeroize_derive" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] diff --git a/hash-set/Cargo.toml b/hash-set/Cargo.toml deleted file mode 100644 index d2bad1a..0000000 --- a/hash-set/Cargo.toml +++ /dev/null @@ -1,26 +0,0 @@ -[package] -name = "light-hash-set" -version = "1.2.0" -description = "Hash set which can be stored on a Solana account" -repository = "https://github.com/Lightprotocol/light-protocol" -license = "Apache-2.0" -edition = "2021" - -[features] -solana = ["solana-program"] - -[dependencies] -light-bounded-vec = { version = "1.1.0" } -light-utils = { version = "1.1.0" } -memoffset = "0.9" -num-bigint = "0.4" -num-traits = "0.2" -solana-program = { version="=1.18.22", optional = true } -thiserror = "1.0" -[target.'cfg(target_os = "solana")'.dependencies] -light-heap = { version = "1.1.0" } - -[dev-dependencies] -ark-bn254 = "0.4" -ark-ff = "0.4" -rand = "0.8" diff --git a/hash-set/src/lib.rs b/hash-set/src/lib.rs deleted file mode 100644 index 9c74f0b..0000000 --- a/hash-set/src/lib.rs +++ /dev/null 
@@ -1,1167 +0,0 @@
-use light_utils::{bigint::bigint_to_be_bytes_array, UtilsError};
-use num_bigint::{BigUint, ToBigUint};
-use num_traits::{FromBytes, ToPrimitive};
-use std::{
-    alloc::{self, handle_alloc_error, Layout},
-    cmp::Ordering,
-    marker::Send,
-    mem,
-    ptr::NonNull,
-};
-use thiserror::Error;
-
-pub mod zero_copy;
-
-pub const ITERATIONS: usize = 20;
-
-#[derive(Debug, Error, PartialEq)]
-pub enum HashSetError {
-    #[error("The hash set is full, cannot add any new elements")]
-    Full,
-    #[error("The provided element is already in the hash set")]
-    ElementAlreadyExists,
-    #[error("The provided element doesn't exist in the hash set")]
-    ElementDoesNotExist,
-    #[error("Could not convert the index from/to usize")]
-    UsizeConv,
-    #[error("Integer overflow")]
-    IntegerOverflow,
-    #[error("Invalid buffer size, expected {0}, got {1}")]
-    BufferSize(usize, usize),
-    #[error("Utils: big integer conversion error")]
-    Utils(#[from] UtilsError),
-}
-
-#[cfg(feature = "solana")]
-impl From<HashSetError> for u32 {
-    fn from(e: HashSetError) -> u32 {
-        match e {
-            HashSetError::Full => 9001,
-            HashSetError::ElementAlreadyExists => 9002,
-            HashSetError::ElementDoesNotExist => 9003,
-            HashSetError::UsizeConv => 9004,
-            HashSetError::IntegerOverflow => 9005,
-            HashSetError::BufferSize(_, _) => 9006,
-            HashSetError::Utils(e) => e.into(),
-        }
-    }
-}
-
-#[cfg(feature = "solana")]
-impl From<HashSetError> for solana_program::program_error::ProgramError {
-    fn from(e: HashSetError) -> Self {
-        solana_program::program_error::ProgramError::Custom(e.into())
-    }
-}
-
-#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)]
-pub struct HashSetCell {
-    pub value: [u8; 32],
-    pub sequence_number: Option<usize>,
-}
-
-unsafe impl Send for HashSet {}
-
-impl HashSetCell {
-    /// Returns the value as a byte array.
-    pub fn value_bytes(&self) -> [u8; 32] {
-        self.value
-    }
-
-    /// Returns the value as a big number.
-    pub fn value_biguint(&self) -> BigUint {
-        BigUint::from_bytes_be(self.value.as_slice())
-    }
-
-    /// Returns the associated sequence number.
-    pub fn sequence_number(&self) -> Option<usize> {
-        self.sequence_number
-    }
-
-    /// Checks whether the value is marked with a sequence number.
-    pub fn is_marked(&self) -> bool {
-        self.sequence_number.is_some()
-    }
-
-    /// Checks whether the value is valid according to the provided
-    /// `current_sequence_number` (which usually should be a sequence number
-    /// associated with the Merkle tree).
-    ///
-    /// The value is valid if:
-    ///
-    /// * It was not annotated with a sequence number.
-    /// * Its sequence number is higher than the provided `sequence_number`.
-    ///
-    /// The value is invalid if its sequence number is lower than or equal to
-    /// the provided `sequence_number`.
-    pub fn is_valid(&self, current_sequence_number: usize) -> bool {
-        match self.sequence_number {
-            Some(sequence_number) => match sequence_number.cmp(&current_sequence_number) {
-                Ordering::Less | Ordering::Equal => false,
-                Ordering::Greater => true,
-            },
-            None => true,
-        }
-    }
-}
-
-#[derive(Debug)]
-pub struct HashSet {
-    /// Capacity of the buckets.
-    capacity: usize,
-    /// Difference of sequence numbers, after which the given element can be
-    /// replaced by another one (with a sequence number higher than the
-    /// threshold).
-    pub sequence_threshold: usize,
-
-    /// An array of buckets. It has a size equal to the expected number of
-    /// elements.
-    buckets: NonNull<Option<HashSetCell>>,
-}
-
-unsafe impl Send for HashSetCell {}
-
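// The validity rule above, restated as a standalone sketch (illustrative only,
// not part of the removed crate): a cell becomes reusable once it has been
// marked and the current sequence number has caught up with the stored one.
// `mark_with_sequence_number` (further down) stores
// `sequence_number + sequence_threshold`, so a marked cell expires exactly
// `sequence_threshold` updates after it was marked.
fn cell_is_reusable(stored_sequence_number: Option<usize>, current_sequence_number: usize) -> bool {
    // Reusable is the negation of `HashSetCell::is_valid` for marked cells;
    // unmarked cells (`None`) never expire.
    matches!(stored_sequence_number, Some(stored) if current_sequence_number >= stored)
}
// cell_is_reusable(Some(2407), 2407) == true
// cell_is_reusable(Some(2407), 2406) == false
// cell_is_reusable(None, usize::MAX) == false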
-impl HashSet {
-    /// Size of the struct **without** dynamically sized fields.
-    pub fn non_dyn_fields_size() -> usize {
-        // capacity
-        mem::size_of::<usize>()
-        // sequence_threshold
-        + mem::size_of::<usize>()
-    }
-
-    /// Size which needs to be allocated on a Solana account to fit the hash set.
-    pub fn size_in_account(capacity_values: usize) -> usize {
-        let dyn_fields_size = Self::non_dyn_fields_size();
-
-        let buckets_size_unaligned = mem::size_of::<Option<HashSetCell>>() * capacity_values;
-        // Make sure that alignment of `values` matches the alignment of `usize`.
-        let buckets_size = buckets_size_unaligned + mem::align_of::<usize>()
-            - (buckets_size_unaligned % mem::align_of::<usize>());
-
-        dyn_fields_size + buckets_size
-    }
-
-    // Creates a new hash set with the given capacity.
-    pub fn new(capacity_values: usize, sequence_threshold: usize) -> Result<HashSet, HashSetError> {
-        // SAFETY: It's just a regular allocation.
-        let layout = Layout::array::<Option<HashSetCell>>(capacity_values).unwrap();
-        let values_ptr = unsafe { alloc::alloc(layout) as *mut Option<HashSetCell> };
-        if values_ptr.is_null() {
-            handle_alloc_error(layout);
-        }
-        let values = NonNull::new(values_ptr).unwrap();
-        for i in 0..capacity_values {
-            unsafe {
-                std::ptr::write(values_ptr.add(i), None);
-            }
-        }
-
-        Ok(HashSet {
-            sequence_threshold,
-            capacity: capacity_values,
-            buckets: values,
-        })
-    }
-
-    /// Creates a copy of `HashSet` from the given byte slice.
-    ///
-    /// # Purpose
-    ///
-    /// This method is meant to be used mostly in the SDK code, to convert
-    /// fetched Solana accounts to actual hash sets. Creating a copy is the
-    /// safest way of conversion in async Rust.
-    ///
-    /// # Safety
-    ///
-    /// This is highly unsafe. Ensuring the alignment and that the slice
-    /// provides actual data of the hash set is the caller's
-    /// responsibility.
-    pub unsafe fn from_bytes_copy(bytes: &mut [u8]) -> Result<Self, HashSetError> {
-        if bytes.len() < Self::non_dyn_fields_size() {
-            return Err(HashSetError::BufferSize(
-                Self::non_dyn_fields_size(),
-                bytes.len(),
-            ));
-        }
-
-        let capacity = usize::from_le_bytes(bytes[0..8].try_into().unwrap());
-        let sequence_threshold = usize::from_le_bytes(bytes[8..16].try_into().unwrap());
-        let expected_size = Self::size_in_account(capacity);
-        if bytes.len() != expected_size {
-            return Err(HashSetError::BufferSize(expected_size, bytes.len()));
-        }
-
-        let buckets_layout = Layout::array::<Option<HashSetCell>>(capacity).unwrap();
-        // SAFETY: Creating a layout for an array of `Option<HashSetCell>` of
-        // any size won't cause any panic.
-        let buckets_dst_ptr = unsafe { alloc::alloc(buckets_layout) as *mut Option<HashSetCell> };
-        if buckets_dst_ptr.is_null() {
-            handle_alloc_error(buckets_layout);
-        }
-        let buckets = NonNull::new(buckets_dst_ptr).unwrap();
-        for i in 0..capacity {
-            std::ptr::write(buckets_dst_ptr.add(i), None);
-        }
-
-        let offset = Self::non_dyn_fields_size() + mem::size_of::<usize>();
-        let buckets_src_ptr = bytes.as_ptr().add(offset) as *const Option<HashSetCell>;
-        std::ptr::copy(buckets_src_ptr, buckets_dst_ptr, capacity);
-
-        Ok(Self {
-            capacity,
-            sequence_threshold,
-            buckets,
-        })
-    }
-
-    fn probe_index(&self, value: &BigUint, iteration: usize) -> usize {
-        // Scale the probing step with the capacity of the hash set.
-        let iteration = iteration + self.capacity / 10;
-        let probe_index = (value
-            + iteration.to_biguint().unwrap() * iteration.to_biguint().unwrap())
-            % self.capacity.to_biguint().unwrap();
-        probe_index.to_usize().unwrap()
-    }
-
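// For intuition, the probing scheme above boils down to the following
// standalone sketch (illustrative only, using plain usize arithmetic instead
// of BigUint; not part of the removed crate):
fn probe_sequence(value: usize, capacity: usize, attempts: usize) -> Vec<usize> {
    (0..attempts)
        .map(|i| {
            // Same trick as `probe_index`: offset the step by capacity / 10 so
            // that even the first attempts are spread across the buckets.
            let i = i + capacity / 10;
            (value + i * i) % capacity
        })
        .collect()
}
// probe_sequence(7, 256, 4) == vec![120, 171, 224, 23]; `insert` tries up to
// `ITERATIONS` of these probes before reporting `HashSetError::Full`.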
-    /// Returns a reference to a bucket under the given `index`. Does not check
-    /// the validity.
-    pub fn get_bucket(&self, index: usize) -> Option<&Option<HashSetCell>> {
-        if index >= self.capacity {
-            return None;
-        }
-        let bucket = unsafe { &*self.buckets.as_ptr().add(index) };
-        Some(bucket)
-    }
-
-    /// Returns a mutable reference to a bucket under the given `index`. Does
-    /// not check the validity.
-    pub fn get_bucket_mut(&mut self, index: usize) -> Option<&mut Option<HashSetCell>> {
-        if index >= self.capacity {
-            return None;
-        }
-        let bucket = unsafe { &mut *self.buckets.as_ptr().add(index) };
-        Some(bucket)
-    }
-
-    /// Returns a reference to an unmarked bucket under the given index. If the
-    /// bucket is marked, returns `None`.
-    pub fn get_unmarked_bucket(&self, index: usize) -> Option<&Option<HashSetCell>> {
-        let bucket = self.get_bucket(index);
-        let is_unmarked = match bucket {
-            Some(Some(bucket)) => !bucket.is_marked(),
-            Some(None) => false,
-            None => false,
-        };
-        if is_unmarked {
-            bucket
-        } else {
-            None
-        }
-    }
-
-    pub fn get_capacity(&self) -> usize {
-        self.capacity
-    }
-
-    fn insert_into_occupied_cell(
-        &mut self,
-        value_index: usize,
-        value: &BigUint,
-        current_sequence_number: usize,
-    ) -> Result<bool, HashSetError> {
-        // PANICS: We trust the bounds of `value_index` here.
-        let bucket = self.get_bucket_mut(value_index).unwrap();
-
-        match bucket {
-            // The cell in the value array is already taken.
-            Some(bucket) => {
-                // We can overwrite that cell only if the element is expired -
-                // when the provided sequence number has reached the sequence
-                // number stored in the cell.
-                if let Some(element_sequence_number) = bucket.sequence_number {
-                    if current_sequence_number >= element_sequence_number {
-                        *bucket = HashSetCell {
-                            value: bigint_to_be_bytes_array(value)?,
-                            sequence_number: None,
-                        };
-                        return Ok(true);
-                    }
-                }
-                // Otherwise, we need to prevent having multiple valid
-                // elements with the same value.
-                if &BigUint::from_be_bytes(bucket.value.as_slice()) == value {
-                    return Err(HashSetError::ElementAlreadyExists);
-                }
-            }
-            // PANICS: If there is a hash set cell pointing to a `None` value,
-            // it means we really screwed up in the implementation...
-            // That should never happen.
-            None => unreachable!(),
-        }
-        Ok(false)
-    }
-
-    /// Inserts a value into the hash set, with up to `ITERATIONS` attempts.
-    ///
-    /// Every attempt uses quadratic probing to find an empty cell or a cell
-    /// which can be overwritten.
-    ///
-    /// `current_sequence_number` is used to check whether existing values can
-    /// be overwritten.
-    pub fn insert(
-        &mut self,
-        value: &BigUint,
-        current_sequence_number: usize,
-    ) -> Result<usize, HashSetError> {
-        let index_bucket = self.find_element_iter(value, current_sequence_number, 0, ITERATIONS)?;
-        let (index, is_new) = match index_bucket {
-            Some(index) => index,
-            None => {
-                return Err(HashSetError::Full);
-            }
-        };
-
-        match is_new {
-            // The visited hash set cell points to a value in the array.
-            false => {
-                if self.insert_into_occupied_cell(index, value, current_sequence_number)? {
-                    return Ok(index);
-                }
-            }
-            true => {
-                // PANICS: We trust the bounds of `index`.
-                let bucket = self.get_bucket_mut(index).unwrap();
-
-                *bucket = Some(HashSetCell {
-                    value: bigint_to_be_bytes_array(value)?,
-                    sequence_number: None,
-                });
-                return Ok(index);
-            }
-        }
-        Err(HashSetError::Full)
-    }
-
-    /// Finds an index of the provided `value` inside `buckets`.
-    ///
-    /// Uses the optional `current_sequence_number` argument for checking the
-    /// validity of the element.
- pub fn find_element_index( - &self, - value: &BigUint, - current_sequence_number: Option, - ) -> Result, HashSetError> { - for i in 0..ITERATIONS { - let probe_index = self.probe_index(value, i); - // PANICS: `probe_index()` ensures the bounds. - let bucket = self.get_bucket(probe_index).unwrap(); - match bucket { - Some(bucket) => { - if &bucket.value_biguint() == value { - match current_sequence_number { - // If the caller provided `current_sequence_number`, - // check the validity of the bucket. - Some(current_sequence_number) => { - if bucket.is_valid(current_sequence_number) { - return Ok(Some(probe_index)); - } - continue; - } - None => return Ok(Some(probe_index)), - } - } - continue; - } - // If we found an empty bucket, it means that there is no - // chance of our element existing in the hash set. - None => { - return Ok(None); - } - } - } - - Ok(None) - } - - pub fn find_element( - &self, - value: &BigUint, - current_sequence_number: Option, - ) -> Result, HashSetError> { - let index = self.find_element_index(value, current_sequence_number)?; - match index { - Some(index) => { - let bucket = self.get_bucket(index).unwrap(); - match bucket { - Some(bucket) => Ok(Some((bucket, index))), - None => Ok(None), - } - } - None => Ok(None), - } - } - - pub fn find_element_mut( - &mut self, - value: &BigUint, - current_sequence_number: Option, - ) -> Result, HashSetError> { - let index = self.find_element_index(value, current_sequence_number)?; - match index { - Some(index) => { - let bucket = self.get_bucket_mut(index).unwrap(); - match bucket { - Some(bucket) => Ok(Some((bucket, index))), - None => Ok(None), - } - } - None => Ok(None), - } - } - - /// find_element_iter iterates over a fixed range of elements - /// in the hash set. - /// We always have to iterate over the whole range - /// to make sure that the value is not in the hash-set. - /// Returns the position of the first free value. - pub fn find_element_iter( - &mut self, - value: &BigUint, - current_sequence_number: usize, - start_iter: usize, - num_iterations: usize, - ) -> Result, HashSetError> { - let mut first_free_element: Option<(usize, bool)> = None; - for i in start_iter..start_iter + num_iterations { - let probe_index = self.probe_index(value, i); - let bucket = self.get_bucket(probe_index).unwrap(); - - match bucket { - Some(bucket) => { - let is_valid = bucket.is_valid(current_sequence_number); - if first_free_element.is_none() && !is_valid { - first_free_element = Some((probe_index, false)); - } - if is_valid && &bucket.value_biguint() == value { - return Err(HashSetError::ElementAlreadyExists); - } else { - continue; - } - } - None => { - // A previous bucket could have been freed already even - // though the whole hash set has not been used yet. - if first_free_element.is_none() { - first_free_element = Some((probe_index, true)); - } - // Since we encountered an empty bucket we know for sure - // that the element is not in a bucket with higher probe - // index. - break; - } - } - } - Ok(first_free_element) - } - - /// Returns a first available element. - pub fn first( - &self, - current_sequence_number: usize, - ) -> Result, HashSetError> { - for i in 0..self.capacity { - // PANICS: The loop ensures the bounds. - let bucket = self.get_bucket(i).unwrap(); - if let Some(bucket) = bucket { - if bucket.is_valid(current_sequence_number) { - return Ok(Some(bucket)); - } - } - } - - Ok(None) - } - - /// Returns a first available element that does not have a sequence number. 
- pub fn first_no_seq(&self) -> Result, HashSetError> { - for i in 0..self.capacity { - // PANICS: The loop ensures the bounds. - let bucket = self.get_bucket(i).unwrap(); - - if let Some(bucket) = bucket { - if bucket.sequence_number.is_none() { - return Ok(Some((*bucket, i as u16))); - } - } - } - - Ok(None) - } - - /// Checks if the hash set contains a value. - pub fn contains( - &self, - value: &BigUint, - sequence_number: Option, - ) -> Result { - let element = self.find_element(value, sequence_number)?; - Ok(element.is_some()) - } - - /// Marks the given element with a given sequence number. - pub fn mark_with_sequence_number( - &mut self, - index: usize, - sequence_number: usize, - ) -> Result<(), HashSetError> { - let sequence_threshold = self.sequence_threshold; - let element = self - .get_bucket_mut(index) - .ok_or(HashSetError::ElementDoesNotExist)?; - - match element { - Some(element) => { - element.sequence_number = Some(sequence_number + sequence_threshold); - Ok(()) - } - None => Err(HashSetError::ElementDoesNotExist), - } - } - - /// Returns an iterator over elements. - pub fn iter(&self) -> HashSetIterator { - HashSetIterator { - hash_set: self, - current: 0, - } - } -} - -impl Drop for HashSet { - fn drop(&mut self) { - // SAFETY: As long as `next_value_index`, `capacity_indices` and - // `capacity_values` are correct, this deallocaion is safe. - unsafe { - let layout = Layout::array::>(self.capacity).unwrap(); - alloc::dealloc(self.buckets.as_ptr() as *mut u8, layout); - } - } -} - -impl PartialEq for HashSet { - fn eq(&self, other: &Self) -> bool { - self.capacity.eq(&other.capacity) - && self.sequence_threshold.eq(&other.sequence_threshold) - && self.iter().eq(other.iter()) - } -} - -pub struct HashSetIterator<'a> { - hash_set: &'a HashSet, - current: usize, -} - -impl<'a> Iterator for HashSetIterator<'a> { - type Item = (usize, &'a HashSetCell); - - fn next(&mut self) -> Option { - while self.current < self.hash_set.get_capacity() { - let element_index = self.current; - self.current += 1; - - if let Some(Some(cur_element)) = self.hash_set.get_bucket(element_index) { - return Some((element_index, cur_element)); - } - } - None - } -} - -#[cfg(test)] -mod test { - use ark_bn254::Fr; - use ark_ff::UniformRand; - use rand::{thread_rng, Rng}; - - use crate::zero_copy::HashSetZeroCopy; - - use super::*; - - #[test] - fn test_is_valid() { - let mut rng = thread_rng(); - - let cell = HashSetCell { - value: [0u8; 32], - sequence_number: None, - }; - // It should be always valid, no matter the sequence number. - assert!(cell.is_valid(0)); - for _ in 0..100 { - let seq: usize = rng.gen(); - assert!(cell.is_valid(seq)); - } - - let cell = HashSetCell { - value: [0u8; 32], - sequence_number: Some(2400), - }; - // Sequence numbers up to 2400 should succeed. - for i in 0..2400 { - assert!(cell.is_valid(i)); - } - for i in 2400..10000 { - assert!(!cell.is_valid(i)); - } - } - - /// Manual test cases. A simple check whether basic properties of the hash - /// set work. - #[test] - fn test_hash_set_manual() { - let mut hs = HashSet::new(256, 4).unwrap(); - - // Insert an element and immediately mark it with a sequence number. - // An equivalent to a single insertion in Light Protocol - let element_1_1 = 1.to_biguint().unwrap(); - let index_1_1 = hs.insert(&element_1_1, 0).unwrap(); - hs.mark_with_sequence_number(index_1_1, 1).unwrap(); - - // Check if element exists in the set. 
- assert!(hs.contains(&element_1_1, Some(1)).unwrap()); - // Try inserting the same element, even though we didn't reach the - // threshold. - assert!(matches!( - hs.insert(&element_1_1, 1), - Err(HashSetError::ElementAlreadyExists) - )); - - // Insert multiple elements and mark them with one sequence number. - // An equivalent to a batched insertion in Light Protocol. - - let element_2_3 = 3.to_biguint().unwrap(); - let element_2_6 = 6.to_biguint().unwrap(); - let element_2_8 = 8.to_biguint().unwrap(); - let element_2_9 = 9.to_biguint().unwrap(); - let index_2_3 = hs.insert(&element_2_3, 1).unwrap(); - let index_2_6 = hs.insert(&element_2_6, 1).unwrap(); - let index_2_8 = hs.insert(&element_2_8, 1).unwrap(); - let index_2_9 = hs.insert(&element_2_9, 1).unwrap(); - assert!(hs.contains(&element_2_3, Some(2)).unwrap()); - assert!(hs.contains(&element_2_6, Some(2)).unwrap()); - assert!(hs.contains(&element_2_8, Some(2)).unwrap()); - assert!(hs.contains(&element_2_9, Some(2)).unwrap()); - hs.mark_with_sequence_number(index_2_3, 2).unwrap(); - hs.mark_with_sequence_number(index_2_6, 2).unwrap(); - hs.mark_with_sequence_number(index_2_8, 2).unwrap(); - hs.mark_with_sequence_number(index_2_9, 2).unwrap(); - assert!(matches!( - hs.insert(&element_2_3, 2), - Err(HashSetError::ElementAlreadyExists) - )); - assert!(matches!( - hs.insert(&element_2_6, 2), - Err(HashSetError::ElementAlreadyExists) - )); - assert!(matches!( - hs.insert(&element_2_8, 2), - Err(HashSetError::ElementAlreadyExists) - )); - assert!(matches!( - hs.insert(&element_2_9, 2), - Err(HashSetError::ElementAlreadyExists) - )); - - let element_3_11 = 11.to_biguint().unwrap(); - let element_3_13 = 13.to_biguint().unwrap(); - let element_3_21 = 21.to_biguint().unwrap(); - let element_3_29 = 29.to_biguint().unwrap(); - let index_3_11 = hs.insert(&element_3_11, 2).unwrap(); - let index_3_13 = hs.insert(&element_3_13, 2).unwrap(); - let index_3_21 = hs.insert(&element_3_21, 2).unwrap(); - let index_3_29 = hs.insert(&element_3_29, 2).unwrap(); - assert!(hs.contains(&element_3_11, Some(3)).unwrap()); - assert!(hs.contains(&element_3_13, Some(3)).unwrap()); - assert!(hs.contains(&element_3_21, Some(3)).unwrap()); - assert!(hs.contains(&element_3_29, Some(3)).unwrap()); - hs.mark_with_sequence_number(index_3_11, 3).unwrap(); - hs.mark_with_sequence_number(index_3_13, 3).unwrap(); - hs.mark_with_sequence_number(index_3_21, 3).unwrap(); - hs.mark_with_sequence_number(index_3_29, 3).unwrap(); - assert!(matches!( - hs.insert(&element_3_11, 3), - Err(HashSetError::ElementAlreadyExists) - )); - assert!(matches!( - hs.insert(&element_3_13, 3), - Err(HashSetError::ElementAlreadyExists) - )); - assert!(matches!( - hs.insert(&element_3_21, 3), - Err(HashSetError::ElementAlreadyExists) - )); - assert!(matches!( - hs.insert(&element_3_29, 3), - Err(HashSetError::ElementAlreadyExists) - )); - - let element_4_93 = 93.to_biguint().unwrap(); - let element_4_65 = 64.to_biguint().unwrap(); - let element_4_72 = 72.to_biguint().unwrap(); - let element_4_15 = 15.to_biguint().unwrap(); - let index_4_93 = hs.insert(&element_4_93, 3).unwrap(); - let index_4_65 = hs.insert(&element_4_65, 3).unwrap(); - let index_4_72 = hs.insert(&element_4_72, 3).unwrap(); - let index_4_15 = hs.insert(&element_4_15, 3).unwrap(); - assert!(hs.contains(&element_4_93, Some(4)).unwrap()); - assert!(hs.contains(&element_4_65, Some(4)).unwrap()); - assert!(hs.contains(&element_4_72, Some(4)).unwrap()); - assert!(hs.contains(&element_4_15, Some(4)).unwrap()); - 
hs.mark_with_sequence_number(index_4_93, 4).unwrap(); - hs.mark_with_sequence_number(index_4_65, 4).unwrap(); - hs.mark_with_sequence_number(index_4_72, 4).unwrap(); - hs.mark_with_sequence_number(index_4_15, 4).unwrap(); - - // Try inserting the same elements we inserted before. - // - // Ones with the sequence number difference lower or equal to the - // sequence threshold (4) will fail. - // - // Ones with the higher dif will succeed. - assert!(matches!( - hs.insert(&element_1_1, 4), - Err(HashSetError::ElementAlreadyExists) - )); - assert!(matches!( - hs.insert(&element_2_3, 5), - Err(HashSetError::ElementAlreadyExists) - )); - assert!(matches!( - hs.insert(&element_2_6, 5), - Err(HashSetError::ElementAlreadyExists) - )); - assert!(matches!( - hs.insert(&element_2_8, 5), - Err(HashSetError::ElementAlreadyExists) - )); - assert!(matches!( - hs.insert(&element_2_9, 5), - Err(HashSetError::ElementAlreadyExists) - )); - hs.insert(&element_1_1, 5).unwrap(); - hs.insert(&element_2_3, 6).unwrap(); - hs.insert(&element_2_6, 6).unwrap(); - hs.insert(&element_2_8, 6).unwrap(); - hs.insert(&element_2_9, 6).unwrap(); - } - - /// Test cases with random prime field elements. - #[ignore = "overflows the stack"] - #[test] - fn test_hash_set_random() { - let mut hs = HashSet::new(6857, 2400).unwrap(); - - // The hash set should be empty. - assert_eq!(hs.first(0).unwrap(), None); - let mut rng = thread_rng(); - let mut seq = 0; - let nullifiers: [BigUint; 24000] = - std::array::from_fn(|_| BigUint::from(Fr::rand(&mut rng))); - for nf_chunk in nullifiers.chunks(2400) { - for nullifier in nf_chunk.iter() { - assert!(hs.contains(nullifier, Some(seq)).unwrap()); - let index = hs.insert(nullifier, seq).unwrap(); - assert!(hs.contains(nullifier, Some(seq)).unwrap()); - - let nullifier_bytes = bigint_to_be_bytes_array(nullifier).unwrap(); - - let element = hs.find_element(nullifier, Some(seq)).unwrap().unwrap().0; - assert_eq!( - *element, - HashSetCell { - value: bigint_to_be_bytes_array(nullifier).unwrap(), - sequence_number: None, - } - ); - assert_eq!(element.value_bytes(), nullifier_bytes); - assert_eq!(&element.value_biguint(), nullifier); - assert_eq!(element.sequence_number(), None); - assert!(!element.is_marked()); - assert!(element.is_valid(seq)); - - hs.mark_with_sequence_number(index, seq).unwrap(); - let element = hs.find_element(nullifier, Some(seq)).unwrap().unwrap().0; - - assert_eq!( - *element, - HashSetCell { - value: nullifier_bytes, - sequence_number: Some(2400 + seq) - } - ); - assert_eq!(element.value_bytes(), nullifier_bytes); - assert_eq!(&element.value_biguint(), nullifier); - assert_eq!(element.sequence_number(), Some(2400 + seq)); - assert!(element.is_marked()); - assert!(element.is_valid(seq)); - - // Trying to insert the same nullifier, before reaching the - // sequence threshold, should fail. - assert!(matches!( - hs.insert(nullifier, seq + 2399), - Err(HashSetError::ElementAlreadyExists), - )); - seq += 1; - } - seq += 2400; - } - } - - fn hash_set_from_bytes_copy< - const CAPACITY: usize, - const SEQUENCE_THRESHOLD: usize, - const OPERATIONS: usize, - >() { - let mut hs_1 = HashSet::new(CAPACITY, SEQUENCE_THRESHOLD).unwrap(); - - let mut rng = thread_rng(); - - // Create a buffer with random bytes. - let mut bytes = vec![0u8; HashSet::size_in_account(CAPACITY)]; - rng.fill(bytes.as_mut_slice()); - - // Initialize a hash set on top of a byte slice. 
- { - let mut hs_2 = unsafe { - HashSetZeroCopy::from_bytes_zero_copy_init(&mut bytes, CAPACITY, SEQUENCE_THRESHOLD) - .unwrap() - }; - - for seq in 0..OPERATIONS { - let value = BigUint::from(Fr::rand(&mut rng)); - hs_1.insert(&value, seq).unwrap(); - hs_2.insert(&value, seq).unwrap(); - } - - assert_eq!(hs_1, *hs_2); - } - - // Create a copy on top of a byte slice. - { - let hs_2 = unsafe { HashSet::from_bytes_copy(&mut bytes).unwrap() }; - assert_eq!(hs_1, hs_2); - } - } - - #[test] - fn test_hash_set_from_bytes_copy_6857_2400_3600() { - hash_set_from_bytes_copy::<6857, 2400, 3600>() - } - - #[test] - fn test_hash_set_from_bytes_copy_9601_2400_5000() { - hash_set_from_bytes_copy::<9601, 2400, 5000>() - } - - fn hash_set_full() { - for _ in 0..100 { - let mut hs = HashSet::new(CAPACITY, SEQUENCE_THRESHOLD).unwrap(); - - let mut rng = rand::thread_rng(); - - // Insert as many values as possible. The important point is to - // encounter the `HashSetError::Full` at some point - for i in 0..CAPACITY { - let value = BigUint::from(Fr::rand(&mut rng)); - match hs.insert(&value, 0) { - Ok(index) => hs.mark_with_sequence_number(index, 0).unwrap(), - Err(e) => { - assert!(matches!(e, HashSetError::Full)); - println!("initial insertions: {i}: failed, stopping"); - break; - } - } - } - - // Keep inserting. It should mostly fail, although there might be - // also some successful insertions - there might be values which - // will end up in unused buckets. - for i in 0..1000 { - let value = BigUint::from(Fr::rand(&mut rng)); - let res = hs.insert(&value, 0); - if res.is_err() { - assert!(matches!(res, Err(HashSetError::Full))); - } else { - println!("secondary insertions: {i}: apparent success with value: {value:?}"); - } - } - - // Try again with defined sequence numbers, but still too small to - // vacate any cell. - for i in 0..1000 { - let value = BigUint::from(Fr::rand(&mut rng)); - // Sequence numbers lower than the threshold should not vacate - // any cell. - let sequence_number = rng.gen_range(0..hs.sequence_threshold); - let res = hs.insert(&value, sequence_number); - if res.is_err() { - assert!(matches!(res, Err(HashSetError::Full))); - } else { - println!("tertiary insertions: {i}: surprising success with value: {value:?}"); - } - } - - // Use sequence numbers which are going to vacate cells. All - // insertions should be successful now. - for i in 0..CAPACITY { - let value = BigUint::from(Fr::rand(&mut rng)); - if let Err(e) = hs.insert(&value, SEQUENCE_THRESHOLD + i) { - assert!(matches!(e, HashSetError::Full)); - println!("insertions after fillup: {i}: failed, stopping"); - break; - } - } - } - } - - #[test] - fn test_hash_set_full_6857_2400() { - hash_set_full::<6857, 2400>() - } - - #[test] - fn test_hash_set_full_9601_2400() { - hash_set_full::<9601, 2400>() - } - - #[test] - fn test_hash_set_element_does_not_exist() { - let mut hs = HashSet::new(4800, 2400).unwrap(); - - let mut rng = thread_rng(); - - for _ in 0..1000 { - let index = rng.gen_range(0..4800); - - // Assert `ElementDoesNotExist` error. 
- let res = hs.mark_with_sequence_number(index, 0); - assert!(matches!(res, Err(HashSetError::ElementDoesNotExist))); - } - - for _ in 0..1000 { - // After actually appending the value, the same operation should be - // possible - let value = BigUint::from(Fr::rand(&mut rng)); - let index = hs.insert(&value, 0).unwrap(); - hs.mark_with_sequence_number(index, 1).unwrap(); - } - } - - #[test] - fn test_hash_set_iter_manual() { - let mut hs = HashSet::new(6857, 2400).unwrap(); - - let nullifier_1 = 945635_u32.to_biguint().unwrap(); - let nullifier_2 = 3546656654734254353455_u128.to_biguint().unwrap(); - let nullifier_3 = 543543656564_u64.to_biguint().unwrap(); - let nullifier_4 = 43_u8.to_biguint().unwrap(); - let nullifier_5 = 0_u8.to_biguint().unwrap(); - let nullifier_6 = 65423_u32.to_biguint().unwrap(); - let nullifier_7 = 745654665_u32.to_biguint().unwrap(); - let nullifier_8 = 97664353453465354645645465_u128.to_biguint().unwrap(); - let nullifier_9 = 453565465464565635475_u128.to_biguint().unwrap(); - let nullifier_10 = 543645654645_u64.to_biguint().unwrap(); - - hs.insert(&nullifier_1, 0).unwrap(); - hs.insert(&nullifier_2, 0).unwrap(); - hs.insert(&nullifier_3, 0).unwrap(); - hs.insert(&nullifier_4, 0).unwrap(); - hs.insert(&nullifier_5, 0).unwrap(); - hs.insert(&nullifier_6, 0).unwrap(); - hs.insert(&nullifier_7, 0).unwrap(); - hs.insert(&nullifier_8, 0).unwrap(); - hs.insert(&nullifier_9, 0).unwrap(); - hs.insert(&nullifier_10, 0).unwrap(); - - let inserted_nullifiers = hs - .iter() - .map(|(_, nullifier)| nullifier.value_biguint()) - .collect::>(); - assert_eq!(inserted_nullifiers.len(), 10); - assert_eq!(inserted_nullifiers[0], nullifier_7); - assert_eq!(inserted_nullifiers[1], nullifier_3); - assert_eq!(inserted_nullifiers[2], nullifier_10); - assert_eq!(inserted_nullifiers[3], nullifier_1); - assert_eq!(inserted_nullifiers[4], nullifier_8); - assert_eq!(inserted_nullifiers[5], nullifier_5); - assert_eq!(inserted_nullifiers[6], nullifier_4); - assert_eq!(inserted_nullifiers[7], nullifier_2); - assert_eq!(inserted_nullifiers[8], nullifier_9); - assert_eq!(inserted_nullifiers[9], nullifier_6); - } - - fn hash_set_iter_random< - const INSERTIONS: usize, - const CAPACITY: usize, - const SEQUENCE_THRESHOLD: usize, - >() { - let mut hs = HashSet::new(CAPACITY, SEQUENCE_THRESHOLD).unwrap(); - let mut rng = thread_rng(); - - let nullifiers: [BigUint; INSERTIONS] = - std::array::from_fn(|_| BigUint::from(Fr::rand(&mut rng))); - - for nullifier in nullifiers.iter() { - hs.insert(nullifier, 0).unwrap(); - } - - let mut sorted_nullifiers = nullifiers.iter().collect::>(); - let mut inserted_nullifiers = hs - .iter() - .map(|(_, nullifier)| nullifier.value_biguint()) - .collect::>(); - sorted_nullifiers.sort(); - inserted_nullifiers.sort(); - - let inserted_nullifiers = inserted_nullifiers.iter().collect::>(); - assert_eq!(inserted_nullifiers.len(), INSERTIONS); - assert_eq!(sorted_nullifiers.as_slice(), inserted_nullifiers.as_slice()); - } - - #[test] - fn test_hash_set_iter_random_6857_2400() { - hash_set_iter_random::<3500, 6857, 2400>() - } - - #[test] - fn test_hash_set_iter_random_9601_2400() { - hash_set_iter_random::<5000, 9601, 2400>() - } - - #[test] - fn test_hash_set_get_bucket() { - let mut hs = HashSet::new(6857, 2400).unwrap(); - - for i in 0..3600 { - let bn_i = i.to_biguint().unwrap(); - hs.insert(&bn_i, i).unwrap(); - } - let mut unused_indices = vec![true; 6857]; - for i in 0..3600 { - let bn_i = i.to_biguint().unwrap(); - let i = hs.find_element_index(&bn_i, 
None).unwrap().unwrap(); - let element = hs.get_bucket(i).unwrap().unwrap(); - assert_eq!(element.value_biguint(), bn_i); - unused_indices[i] = false; - } - // Unused cells within the capacity should be `Some(None)`. - for i in unused_indices.iter().enumerate() { - if *i.1 { - assert!(hs.get_bucket(i.0).unwrap().is_none()); - } - } - // Cells over the capacity should be `None`. - for i in 6857..10_000 { - assert!(hs.get_bucket(i).is_none()); - } - } - - #[test] - fn test_hash_set_get_bucket_mut() { - let mut hs = HashSet::new(6857, 2400).unwrap(); - - for i in 0..3600 { - let bn_i = i.to_biguint().unwrap(); - hs.insert(&bn_i, i).unwrap(); - } - let mut unused_indices = vec![false; 6857]; - - for i in 0..3600 { - let bn_i = i.to_biguint().unwrap(); - let i = hs.find_element_index(&bn_i, None).unwrap().unwrap(); - - let element = hs.get_bucket_mut(i).unwrap(); - assert_eq!(element.unwrap().value_biguint(), bn_i); - unused_indices[i] = true; - - // "Nullify" the element. - *element = Some(HashSetCell { - value: [0_u8; 32], - sequence_number: None, - }); - } - - for (i, is_used) in unused_indices.iter().enumerate() { - if *is_used { - let element = hs.get_bucket_mut(i).unwrap().unwrap(); - assert_eq!(element.value_bytes(), [0_u8; 32]); - } - } - // Unused cells within the capacity should be `Some(None)`. - for (i, is_used) in unused_indices.iter().enumerate() { - if !*is_used { - assert!(hs.get_bucket_mut(i).unwrap().is_none()); - } - } - // Cells over the capacity should be `None`. - for i in 6857..10_000 { - assert!(hs.get_bucket_mut(i).is_none()); - } - } - - #[test] - fn test_hash_set_get_unmarked_bucket() { - let mut hs = HashSet::new(6857, 2400).unwrap(); - - // Insert incremental elements, so they end up being in the same - // sequence in the hash set. - (0..3600).for_each(|i| { - let bn_i = i.to_biguint().unwrap(); - hs.insert(&bn_i, i).unwrap(); - }); - - for i in 0..3600 { - let i = hs - .find_element_index(&i.to_biguint().unwrap(), None) - .unwrap() - .unwrap(); - let element = hs.get_unmarked_bucket(i); - assert!(element.is_some()); - } - - // Mark the elements. - for i in 0..3600 { - let index = hs - .find_element_index(&i.to_biguint().unwrap(), None) - .unwrap() - .unwrap(); - hs.mark_with_sequence_number(index, i).unwrap(); - } - - for i in 0..3600 { - let i = hs - .find_element_index(&i.to_biguint().unwrap(), None) - .unwrap() - .unwrap(); - let element = hs.get_unmarked_bucket(i); - assert!(element.is_none()); - } - } - - #[test] - fn test_hash_set_first_no_seq() { - let mut hs = HashSet::new(6857, 2400).unwrap(); - - // Insert incremental elements, so they end up being in the same - // sequence in the hash set. - for i in 0..3600 { - let bn_i = i.to_biguint().unwrap(); - hs.insert(&bn_i, i).unwrap(); - - let element = hs.first_no_seq().unwrap().unwrap(); - assert_eq!(element.0.value_biguint(), 0.to_biguint().unwrap()); - } - } -} diff --git a/hash-set/src/zero_copy.rs b/hash-set/src/zero_copy.rs deleted file mode 100644 index e4e35d6..0000000 --- a/hash-set/src/zero_copy.rs +++ /dev/null @@ -1,239 +0,0 @@ -use std::{ - marker::PhantomData, - mem, - ops::{Deref, DerefMut}, - ptr::NonNull, -}; - -use crate::{HashSet, HashSetCell, HashSetError}; - -/// A `HashSet` wrapper which can be instantiated from Solana account bytes -/// without copying them. -#[derive(Debug)] -pub struct HashSetZeroCopy<'a> { - pub hash_set: mem::ManuallyDrop, - _marker: PhantomData<&'a ()>, -} - -impl<'a> HashSetZeroCopy<'a> { - // TODO(vadorovsky): Add a non-mut method: `from_bytes_zero_copy`. 
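// The two constructors below were typically used together like this (an
// illustrative sketch, not part of the removed crate; assumes `bytes` is the
// data of a Solana account sized with `HashSet::size_in_account`):
fn account_round_trip(bytes: &mut [u8]) -> Result<(), HashSetError> {
    use num_bigint::ToBigUint;
    let value = 42.to_biguint().unwrap();

    // On account initialization: write the header and zero out the buckets.
    let mut hs = unsafe { HashSetZeroCopy::from_bytes_zero_copy_init(bytes, 4800, 2400)? };
    let index = hs.insert(&value, 0)?;
    hs.mark_with_sequence_number(index, 0)?;
    drop(hs);

    // In a later instruction: reinterpret the same bytes without copying.
    let hs = unsafe { HashSetZeroCopy::from_bytes_zero_copy_mut(bytes)? };
    assert!(hs.contains(&value, Some(1))?);
    Ok(())
}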
-
-    /// Casts a byte slice into `HashSet`.
-    ///
-    /// # Purpose
-    ///
-    /// This method is meant to be used mostly in Solana programs, where memory
-    /// constraints are tight and we want to make sure no data is copied.
-    ///
-    /// # Safety
-    ///
-    /// This is highly unsafe. Ensuring the alignment and that the slice
-    /// provides actual data of the hash set is the caller's responsibility.
-    ///
-    /// Calling it in async context (or anywhere where the underlying data can
-    /// be moved in memory) is certainly going to cause undefined behavior.
-    pub unsafe fn from_bytes_zero_copy_mut(bytes: &'a mut [u8]) -> Result<Self, HashSetError> {
-        if bytes.len() < HashSet::non_dyn_fields_size() {
-            return Err(HashSetError::BufferSize(
-                HashSet::non_dyn_fields_size(),
-                bytes.len(),
-            ));
-        }
-
-        let capacity_values = usize::from_le_bytes(bytes[0..8].try_into().unwrap());
-        let sequence_threshold = usize::from_le_bytes(bytes[8..16].try_into().unwrap());
-
-        let offset = HashSet::non_dyn_fields_size() + mem::size_of::<usize>();
-
-        let values_size = mem::size_of::<Option<HashSetCell>>() * capacity_values;
-
-        let expected_size = HashSet::non_dyn_fields_size() + values_size;
-        if bytes.len() < expected_size {
-            return Err(HashSetError::BufferSize(expected_size, bytes.len()));
-        }
-
-        let buckets =
-            NonNull::new(bytes.as_mut_ptr().add(offset) as *mut Option<HashSetCell>).unwrap();
-
-        Ok(Self {
-            hash_set: mem::ManuallyDrop::new(HashSet {
-                capacity: capacity_values,
-                sequence_threshold,
-                buckets,
-            }),
-            _marker: PhantomData,
-        })
-    }
-
-    /// Casts a byte slice into `HashSet` and then initializes it.
-    ///
-    /// * `bytes` is cast into a reference of `HashSet` and used as
-    ///   storage for the struct.
-    /// * `capacity_indices` indicates the size of the indices table. It should
-    ///   already include a desired load factor and be greater than the expected
-    ///   number of elements, to avoid filling the set too early and to avoid
-    ///   creating clusters.
-    /// * `capacity_values` indicates the size of the values array. It should be
-    ///   equal to the number of expected elements, without a load factor.
-    /// * `sequence_threshold` indicates the difference of sequence numbers which
-    ///   makes elements of the hash set expire. Expiration means that they can
-    ///   be replaced during insertion of new elements with sequence numbers
-    ///   higher by at least the threshold.
-    ///
-    /// # Purpose
-    ///
-    /// This method is meant to be used mostly in Solana programs to initialize
-    /// a new account which is supposed to store the hash set.
-    ///
-    /// # Safety
-    ///
-    /// This is highly unsafe. Ensuring the alignment and that the slice has
-    /// a correct size, which is able to fit the hash set, is the caller's
-    /// responsibility.
-    ///
-    /// Calling it in async context (or anywhere where the underlying data can
-    /// be moved in memory) is certainly going to cause undefined behavior.
-    pub unsafe fn from_bytes_zero_copy_init(
-        bytes: &'a mut [u8],
-        capacity_values: usize,
-        sequence_threshold: usize,
-    ) -> Result<Self, HashSetError> {
-        if bytes.len() < HashSet::non_dyn_fields_size() {
-            return Err(HashSetError::BufferSize(
-                HashSet::non_dyn_fields_size(),
-                bytes.len(),
-            ));
-        }
-
-        bytes[0..8].copy_from_slice(&capacity_values.to_le_bytes());
-        bytes[8..16].copy_from_slice(&sequence_threshold.to_le_bytes());
-        bytes[16..24].copy_from_slice(&0_usize.to_le_bytes());
-
-        let hash_set = Self::from_bytes_zero_copy_mut(bytes)?;
-
-        for i in 0..capacity_values {
-            std::ptr::write(hash_set.hash_set.buckets.as_ptr().add(i), None);
-        }
-
-        Ok(hash_set)
-    }
-}
-
-impl Drop for HashSetZeroCopy<'_> {
-    fn drop(&mut self) {
-        // SAFETY: Don't do anything here! Why?
-        //
-        // * Primitive fields of `HashSet` implement `Copy`, therefore `drop()`
-        //   has no effect on them - Rust drops them when they go out of scope.
-        // * Don't drop the dynamic fields (`indices` and `values`). In
-        //   `HashSetZeroCopy`, they are backed by buffers provided by the
-        //   caller. These buffers are eventually deallocated by the caller.
-        //   Performing another `drop()` here would result in a double `free()`,
-        //   which would abort the program (either with `SIGABRT` or `SIGSEGV`).
-    }
-}
-
-impl Deref for HashSetZeroCopy<'_> {
-    type Target = HashSet;
-
-    fn deref(&self) -> &Self::Target {
-        &self.hash_set
-    }
-}
-
-impl DerefMut for HashSetZeroCopy<'_> {
-    fn deref_mut(&mut self) -> &mut Self::Target {
-        &mut self.hash_set
-    }
-}
-
-#[cfg(test)]
-mod test {
-    use ark_bn254::Fr;
-    use ark_ff::UniformRand;
-    use num_bigint::BigUint;
-    use rand::{thread_rng, Rng};
-
-    use super::*;
-
-    #[test]
-    fn test_load_from_bytes() {
-        const VALUES: usize = 4800;
-        const SEQUENCE_THRESHOLD: usize = 2400;
-
-        // Create a buffer with random bytes.
-        let mut bytes = vec![0u8; HashSet::size_in_account(VALUES)];
-        thread_rng().fill(bytes.as_mut_slice());
-
-        // Create random nullifiers.
-        let mut rng = thread_rng();
-        let nullifiers: [BigUint; 2400] =
-            std::array::from_fn(|_| BigUint::from(Fr::rand(&mut rng)));
-
-        // Initialize a hash set on top of a byte slice.
-        {
-            let mut hs = unsafe {
-                HashSetZeroCopy::from_bytes_zero_copy_init(
-                    bytes.as_mut_slice(),
-                    VALUES,
-                    SEQUENCE_THRESHOLD,
-                )
-                .unwrap()
-            };
-
-            // Ensure that the underlying data was properly initialized.
-            assert_eq!(hs.hash_set.get_capacity(), VALUES);
-            assert_eq!(hs.hash_set.sequence_threshold, SEQUENCE_THRESHOLD);
-            for i in 0..VALUES {
-                assert!(unsafe { &*hs.hash_set.buckets.as_ptr().add(i) }.is_none());
-            }
-
-            for (seq, nullifier) in nullifiers.iter().enumerate() {
-                let index = hs.insert(nullifier, seq).unwrap();
-                hs.mark_with_sequence_number(index, seq).unwrap();
-            }
-        }
-
-        // Read the hash set from the buffer again.
-        {
-            let mut hs =
-                unsafe { HashSetZeroCopy::from_bytes_zero_copy_mut(bytes.as_mut_slice()).unwrap() };
-
-            for (seq, nullifier) in nullifiers.iter().enumerate() {
-                assert!(hs.contains(nullifier, Some(seq)).unwrap());
-            }
-
-            for (seq, nullifier) in nullifiers.iter().enumerate() {
-                hs.insert(nullifier, 2400 + seq).unwrap();
-            }
-            drop(hs);
-        }
-
-        // Make a copy of the hash set from the same buffer.
- { - let hs = unsafe { HashSet::from_bytes_copy(bytes.as_mut_slice()).unwrap() }; - - for (seq, nullifier) in nullifiers.iter().enumerate() { - assert!(hs.contains(nullifier, Some(2400 + seq)).unwrap()); - } - } - } - - #[test] - fn test_buffer_size_error() { - const VALUES: usize = 4800; - const SEQUENCE_THRESHOLD: usize = 2400; - - let mut invalid_bytes = vec![0_u8; 256]; - - let res = unsafe { - HashSetZeroCopy::from_bytes_zero_copy_init( - invalid_bytes.as_mut_slice(), - VALUES, - SEQUENCE_THRESHOLD, - ) - }; - assert!(matches!(res, Err(HashSetError::BufferSize(_, _)))); - } -} diff --git a/indexed/Cargo.lock b/indexed/Cargo.lock deleted file mode 100644 index 3baf21f..0000000 --- a/indexed/Cargo.lock +++ /dev/null @@ -1,1944 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -version = 4 - -[[package]] -name = "ahash" -version = "0.7.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" -dependencies = [ - "getrandom 0.2.15", - "once_cell", - "version_check", -] - -[[package]] -name = "ahash" -version = "0.8.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" -dependencies = [ - "cfg-if", - "once_cell", - "version_check", - "zerocopy", -] - -[[package]] -name = "anchor-attribute-access-control" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5f619f1d04f53621925ba8a2e633ba5a6081f2ae14758cbb67f38fd823e0a3e" -dependencies = [ - "anchor-syn", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-attribute-account" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7f2a3e1df4685f18d12a943a9f2a7456305401af21a07c9fe076ef9ecd6e400" -dependencies = [ - "anchor-syn", - "bs58 0.5.1", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-attribute-constant" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9423945cb55627f0b30903288e78baf6f62c6c8ab28fb344b6b25f1ffee3dca7" -dependencies = [ - "anchor-syn", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-attribute-error" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93ed12720033cc3c3bf3cfa293349c2275cd5ab99936e33dd4bf283aaad3e241" -dependencies = [ - "anchor-syn", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-attribute-event" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eef4dc0371eba2d8c8b54794b0b0eb786a234a559b77593d6f80825b6d2c77a2" -dependencies = [ - "anchor-syn", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-attribute-program" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b18c4f191331e078d4a6a080954d1576241c29c56638783322a18d308ab27e4f" -dependencies = [ - "anchor-syn", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-derive-accounts" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5de10d6e9620d3bcea56c56151cad83c5992f50d5960b3a9bebc4a50390ddc3c" -dependencies = [ - "anchor-syn", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-derive-serde" -version = "0.29.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4e2e5be518ec6053d90a2a7f26843dbee607583c779e6c8395951b9739bdfbe" -dependencies = [ - "anchor-syn", - "borsh-derive-internal 0.10.4", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-derive-space" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ecc31d19fa54840e74b7a979d44bcea49d70459de846088a1d71e87ba53c419" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "anchor-lang" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35da4785497388af0553586d55ebdc08054a8b1724720ef2749d313494f2b8ad" -dependencies = [ - "anchor-attribute-access-control", - "anchor-attribute-account", - "anchor-attribute-constant", - "anchor-attribute-error", - "anchor-attribute-event", - "anchor-attribute-program", - "anchor-derive-accounts", - "anchor-derive-serde", - "anchor-derive-space", - "arrayref", - "base64 0.13.1", - "bincode", - "borsh 0.10.4", - "bytemuck", - "getrandom 0.2.15", - "solana-program", - "thiserror", -] - -[[package]] -name = "anchor-syn" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9101b84702fed2ea57bd22992f75065da5648017135b844283a2f6d74f27825" -dependencies = [ - "anyhow", - "bs58 0.5.1", - "heck", - "proc-macro2", - "quote", - "serde", - "serde_json", - "sha2 0.10.8", - "syn 1.0.109", - "thiserror", -] - -[[package]] -name = "anyhow" -version = "1.0.94" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7" - -[[package]] -name = "ark-bn254" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a22f4561524cd949590d78d7d4c5df8f592430d221f7f3c9497bbafd8972120f" -dependencies = [ - "ark-ec", - "ark-ff", - "ark-std", -] - -[[package]] -name = "ark-ec" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "defd9a439d56ac24968cca0571f598a61bc8c55f71d50a89cda591cb750670ba" -dependencies = [ - "ark-ff", - "ark-poly", - "ark-serialize", - "ark-std", - "derivative", - "hashbrown 0.13.2", - "itertools", - "num-traits", - "zeroize", -] - -[[package]] -name = "ark-ff" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec847af850f44ad29048935519032c33da8aa03340876d351dfab5660d2966ba" -dependencies = [ - "ark-ff-asm", - "ark-ff-macros", - "ark-serialize", - "ark-std", - "derivative", - "digest 0.10.7", - "itertools", - "num-bigint", - "num-traits", - "paste", - "rustc_version", - "zeroize", -] - -[[package]] -name = "ark-ff-asm" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed4aa4fe255d0bc6d79373f7e31d2ea147bcf486cba1be5ba7ea85abdb92348" -dependencies = [ - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-ff-macros" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565" -dependencies = [ - "num-bigint", - "num-traits", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-poly" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d320bfc44ee185d899ccbadfa8bc31aab923ce1558716e1997a1e74057fe86bf" -dependencies = [ - "ark-ff", - "ark-serialize", - "ark-std", - 
"derivative", - "hashbrown 0.13.2", -] - -[[package]] -name = "ark-serialize" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adb7b85a02b83d2f22f89bd5cac66c9c89474240cb6207cb1efc16d098e822a5" -dependencies = [ - "ark-serialize-derive", - "ark-std", - "digest 0.10.7", - "num-bigint", -] - -[[package]] -name = "ark-serialize-derive" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae3281bc6d0fd7e549af32b52511e1302185bd688fd3359fa36423346ff682ea" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-std" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" -dependencies = [ - "num-traits", - "rand 0.8.5", -] - -[[package]] -name = "arrayref" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" - -[[package]] -name = "arrayvec" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" - -[[package]] -name = "autocfg" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" - -[[package]] -name = "base64" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" - -[[package]] -name = "base64" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" - -[[package]] -name = "base64" -version = "0.21.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" - -[[package]] -name = "bincode" -version = "1.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" -dependencies = [ - "serde", -] - -[[package]] -name = "bitflags" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" -dependencies = [ - "serde", -] - -[[package]] -name = "bitmaps" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" -dependencies = [ - "typenum", -] - -[[package]] -name = "blake3" -version = "1.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8ee0c1824c4dea5b5f81736aff91bae041d2c07ee1192bec91054e10e3e601e" -dependencies = [ - "arrayref", - "arrayvec", - "cc", - "cfg-if", - "constant_time_eq", - "digest 0.10.7", -] - -[[package]] -name = "block-buffer" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" -dependencies = [ - "generic-array", -] - -[[package]] -name = "block-buffer" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" -dependencies = [ - "generic-array", -] - -[[package]] -name = 
"borsh" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15bf3650200d8bffa99015595e10f1fbd17de07abbc25bb067da79e769939bfa" -dependencies = [ - "borsh-derive 0.9.3", - "hashbrown 0.11.2", -] - -[[package]] -name = "borsh" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "115e54d64eb62cdebad391c19efc9dce4981c690c85a33a12199d99bb9546fee" -dependencies = [ - "borsh-derive 0.10.4", - "hashbrown 0.13.2", -] - -[[package]] -name = "borsh" -version = "1.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2506947f73ad44e344215ccd6403ac2ae18cd8e046e581a441bf8d199f257f03" -dependencies = [ - "borsh-derive 1.5.3", - "cfg_aliases", -] - -[[package]] -name = "borsh-derive" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6441c552f230375d18e3cc377677914d2ca2b0d36e52129fe15450a2dce46775" -dependencies = [ - "borsh-derive-internal 0.9.3", - "borsh-schema-derive-internal 0.9.3", - "proc-macro-crate 0.1.5", - "proc-macro2", - "syn 1.0.109", -] - -[[package]] -name = "borsh-derive" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "831213f80d9423998dd696e2c5345aba6be7a0bd8cd19e31c5243e13df1cef89" -dependencies = [ - "borsh-derive-internal 0.10.4", - "borsh-schema-derive-internal 0.10.4", - "proc-macro-crate 0.1.5", - "proc-macro2", - "syn 1.0.109", -] - -[[package]] -name = "borsh-derive" -version = "1.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2593a3b8b938bd68373196c9832f516be11fa487ef4ae745eb282e6a56a7244" -dependencies = [ - "once_cell", - "proc-macro-crate 3.2.0", - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "borsh-derive-internal" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5449c28a7b352f2d1e592a8a28bf139bc71afb0764a14f3c02500935d8c44065" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "borsh-derive-internal" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65d6ba50644c98714aa2a70d13d7df3cd75cd2b523a2b452bf010443800976b3" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "borsh-schema-derive-internal" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdbd5696d8bfa21d53d9fe39a714a18538bad11492a42d066dbbc395fb1951c0" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "borsh-schema-derive-internal" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "276691d96f063427be83e6692b86148e488ebba9f48f77788724ca027ba3b6d4" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "bs58" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "771fe0050b883fcc3ea2359b1a96bcfbc090b7116eae7c3c512c7a083fdf23d3" - -[[package]] -name = "bs58" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf88ba1141d185c399bee5288d850d63b8369520c1eafc32a0430b5b6c287bf4" -dependencies = [ - "tinyvec", -] - -[[package]] -name = "bumpalo" -version = "3.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" - -[[package]] -name 
= "bv" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8834bb1d8ee5dc048ee3124f2c7c1afcc6bc9aed03f11e9dfd8c69470a5db340" -dependencies = [ - "feature-probe", - "serde", -] - -[[package]] -name = "bytemuck" -version = "1.21.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef657dfab802224e671f5818e9a4935f9b1957ed18e58292690cc39e7a4092a3" -dependencies = [ - "bytemuck_derive", -] - -[[package]] -name = "bytemuck_derive" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fa76293b4f7bb636ab88fd78228235b5248b4d05cc589aed610f954af5d7c7a" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "byteorder" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" - -[[package]] -name = "cc" -version = "1.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c31a0499c1dc64f458ad13872de75c0eb7e3fdb0e67964610c914b034fc5956e" -dependencies = [ - "jobserver", - "libc", - "shlex", -] - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "cfg_aliases" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" - -[[package]] -name = "console_error_panic_hook" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" -dependencies = [ - "cfg-if", - "wasm-bindgen", -] - -[[package]] -name = "console_log" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89f72f65e8501878b8a004d5a1afb780987e2ce2b4532c562e367a72c57499f" -dependencies = [ - "log", - "web-sys", -] - -[[package]] -name = "constant_time_eq" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" - -[[package]] -name = "cpufeatures" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" -dependencies = [ - "libc", -] - -[[package]] -name = "crossbeam-deque" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" -dependencies = [ - "crossbeam-epoch", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.9.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-utils" -version = "0.8.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" - -[[package]] -name = "crunchy" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" - -[[package]] -name = "crypto-common" -version = "0.1.6" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" -dependencies = [ - "generic-array", - "typenum", -] - -[[package]] -name = "crypto-mac" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" -dependencies = [ - "generic-array", - "subtle", -] - -[[package]] -name = "curve25519-dalek" -version = "3.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90f9d052967f590a76e62eb387bd0bbb1b000182c3cefe5364db6b7211651bc0" -dependencies = [ - "byteorder", - "digest 0.9.0", - "rand_core 0.5.1", - "serde", - "subtle", - "zeroize", -] - -[[package]] -name = "derivative" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "digest" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" -dependencies = [ - "generic-array", -] - -[[package]] -name = "digest" -version = "0.10.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" -dependencies = [ - "block-buffer 0.10.4", - "crypto-common", - "subtle", -] - -[[package]] -name = "either" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" - -[[package]] -name = "equivalent" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" - -[[package]] -name = "feature-probe" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "835a3dc7d1ec9e75e2b5fb4ba75396837112d2060b03f7d43bc1897c7f7211da" - -[[package]] -name = "generic-array" -version = "0.14.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" -dependencies = [ - "serde", - "typenum", - "version_check", -] - -[[package]] -name = "getrandom" -version = "0.1.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" -dependencies = [ - "cfg-if", - "js-sys", - "libc", - "wasi 0.9.0+wasi-snapshot-preview1", - "wasm-bindgen", -] - -[[package]] -name = "getrandom" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" -dependencies = [ - "cfg-if", - "js-sys", - "libc", - "wasi 0.11.0+wasi-snapshot-preview1", - "wasm-bindgen", -] - -[[package]] -name = "hashbrown" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" -dependencies = [ - "ahash 0.7.8", -] - -[[package]] -name = "hashbrown" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" -dependencies = [ - "ahash 0.8.11", -] - -[[package]] -name = "hashbrown" -version = 
"0.15.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" - -[[package]] -name = "heck" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" -dependencies = [ - "unicode-segmentation", -] - -[[package]] -name = "hex" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" - -[[package]] -name = "hmac" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "126888268dcc288495a26bf004b38c5fdbb31682f992c84ceb046a1f0fe38840" -dependencies = [ - "crypto-mac", - "digest 0.9.0", -] - -[[package]] -name = "hmac-drbg" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17ea0a1394df5b6574da6e0c1ade9e78868c9fb0a4e5ef4428e32da4676b85b1" -dependencies = [ - "digest 0.9.0", - "generic-array", - "hmac", -] - -[[package]] -name = "im" -version = "15.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0acd33ff0285af998aaf9b57342af478078f53492322fafc47450e09397e0e9" -dependencies = [ - "bitmaps", - "rand_core 0.6.4", - "rand_xoshiro", - "rayon", - "serde", - "sized-chunks", - "typenum", - "version_check", -] - -[[package]] -name = "indexmap" -version = "2.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" -dependencies = [ - "equivalent", - "hashbrown 0.15.2", -] - -[[package]] -name = "itertools" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", -] - -[[package]] -name = "itoa" -version = "1.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" - -[[package]] -name = "jobserver" -version = "0.1.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" -dependencies = [ - "libc", -] - -[[package]] -name = "js-sys" -version = "0.3.76" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6717b6b5b077764fb5966237269cb3c64edddde4b14ce42647430a78ced9e7b7" -dependencies = [ - "once_cell", - "wasm-bindgen", -] - -[[package]] -name = "keccak" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" -dependencies = [ - "cpufeatures", -] - -[[package]] -name = "lazy_static" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" - -[[package]] -name = "libc" -version = "0.2.169" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" - -[[package]] -name = "libsecp256k1" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9d220bc1feda2ac231cb78c3d26f27676b8cf82c96971f7aeef3d0cf2797c73" -dependencies = [ - "arrayref", - "base64 0.12.3", - "digest 0.9.0", - "hmac-drbg", - "libsecp256k1-core", - 
"libsecp256k1-gen-ecmult", - "libsecp256k1-gen-genmult", - "rand 0.7.3", - "serde", - "sha2 0.9.9", - "typenum", -] - -[[package]] -name = "libsecp256k1-core" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0f6ab710cec28cef759c5f18671a27dae2a5f952cdaaee1d8e2908cb2478a80" -dependencies = [ - "crunchy", - "digest 0.9.0", - "subtle", -] - -[[package]] -name = "libsecp256k1-gen-ecmult" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccab96b584d38fac86a83f07e659f0deafd0253dc096dab5a36d53efe653c5c3" -dependencies = [ - "libsecp256k1-core", -] - -[[package]] -name = "libsecp256k1-gen-genmult" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67abfe149395e3aa1c48a2beb32b068e2334402df8181f818d3aee2b304c4f5d" -dependencies = [ - "libsecp256k1-core", -] - -[[package]] -name = "light-bounded-vec" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47ced86d6f1b163a04d5d0be44f8bbeedb11d32f73af27812bbd144e0f1f1a42" -dependencies = [ - "bytemuck", - "memoffset", - "solana-program", - "thiserror", -] - -[[package]] -name = "light-concurrent-merkle-tree" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d84ffa991f38260b12f9fb60e7087aeb2d696513936eaee249e6f3e739651a" -dependencies = [ - "borsh 0.10.4", - "bytemuck", - "light-bounded-vec", - "light-hasher", - "light-utils", - "memoffset", - "solana-program", - "thiserror", -] - -[[package]] -name = "light-hash-set" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7cd392ed4df05a545dfb8a58ef72639fbba9dee7a0605b81c3370d02161932b" -dependencies = [ - "light-bounded-vec", - "light-heap", - "light-utils", - "memoffset", - "num-bigint", - "num-traits", - "thiserror", -] - -[[package]] -name = "light-hasher" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e932ed98282fa564ff4518416de688593a0f425c81d68cfa70e98da21a17a36f" -dependencies = [ - "ark-bn254", - "light-poseidon", - "sha2 0.10.8", - "sha3", - "solana-program", - "thiserror", -] - -[[package]] -name = "light-heap" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7304b9ef6e32c540b685fb3cb13081db831b8f4ea03d1d5d54491dee19100eb5" -dependencies = [ - "anchor-lang", -] - -[[package]] -name = "light-indexed-merkle-tree" -version = "1.1.0" -dependencies = [ - "borsh 0.10.4", - "hex", - "light-bounded-vec", - "light-concurrent-merkle-tree", - "light-hash-set", - "light-hasher", - "light-merkle-tree-reference", - "light-utils", - "memoffset", - "num-bigint", - "num-traits", - "rand 0.8.5", - "serde", - "serde_json", - "solana-program", - "thiserror", -] - -[[package]] -name = "light-merkle-tree-reference" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb9153fd97f1bdba3ec1de6f4c4f20134c6e5e1285676bcb9ef5ebe493f41afa" -dependencies = [ - "light-bounded-vec", - "light-hasher", - "log", - "thiserror", -] - -[[package]] -name = "light-poseidon" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c9a85a9752c549ceb7578064b4ed891179d20acd85f27318573b64d2d7ee7ee" -dependencies = [ - "ark-bn254", - "ark-ff", - "num-bigint", - "thiserror", -] - -[[package]] -name = "light-utils" -version = "1.1.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "19e2b5a4959cb0456b483a20b4f3930920949137c00e76b5d0f9bf8d701a3c6a" -dependencies = [ - "anyhow", - "ark-bn254", - "ark-ff", - "light-bounded-vec", - "num-bigint", - "rand 0.8.5", - "solana-program", - "thiserror", -] - -[[package]] -name = "lock_api" -version = "0.4.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" -dependencies = [ - "autocfg", - "scopeguard", -] - -[[package]] -name = "log" -version = "0.4.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" - -[[package]] -name = "memchr" -version = "2.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" - -[[package]] -name = "memmap2" -version = "0.5.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83faa42c0a078c393f6b29d5db232d8be22776a891f8f56e5284faee4a20b327" -dependencies = [ - "libc", -] - -[[package]] -name = "memoffset" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" -dependencies = [ - "autocfg", -] - -[[package]] -name = "num-bigint" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" -dependencies = [ - "num-integer", - "num-traits", - "rand 0.8.5", -] - -[[package]] -name = "num-derive" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "num-integer" -version = "0.1.46" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" -dependencies = [ - "num-traits", -] - -[[package]] -name = "num-traits" -version = "0.2.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" -dependencies = [ - "autocfg", -] - -[[package]] -name = "once_cell" -version = "1.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" - -[[package]] -name = "opaque-debug" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" - -[[package]] -name = "parking_lot" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" -dependencies = [ - "lock_api", - "parking_lot_core", -] - -[[package]] -name = "parking_lot_core" -version = "0.9.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" -dependencies = [ - "cfg-if", - "libc", - "redox_syscall", - "smallvec", - "windows-targets", -] - -[[package]] -name = "paste" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" - -[[package]] -name = "pbkdf2" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "216eaa586a190f0a738f2f918511eecfa90f13295abec0e457cdebcceda80cbd" -dependencies = [ - "crypto-mac", -] - -[[package]] -name = "ppv-lite86" -version = "0.2.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" -dependencies = [ - "zerocopy", -] - -[[package]] -name = "proc-macro-crate" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785" -dependencies = [ - "toml", -] - -[[package]] -name = "proc-macro-crate" -version = "3.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" -dependencies = [ - "toml_edit", -] - -[[package]] -name = "proc-macro2" -version = "1.0.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "quote" -version = "1.0.37" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "rand" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" -dependencies = [ - "getrandom 0.1.16", - "libc", - "rand_chacha 0.2.2", - "rand_core 0.5.1", - "rand_hc", -] - -[[package]] -name = "rand" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" -dependencies = [ - "libc", - "rand_chacha 0.3.1", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_chacha" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" -dependencies = [ - "ppv-lite86", - "rand_core 0.5.1", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_core" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" -dependencies = [ - "getrandom 0.1.16", -] - -[[package]] -name = "rand_core" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" -dependencies = [ - "getrandom 0.2.15", -] - -[[package]] -name = "rand_hc" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" -dependencies = [ - "rand_core 0.5.1", -] - -[[package]] -name = "rand_xoshiro" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa" -dependencies = [ - "rand_core 0.6.4", -] - 
-[[package]] -name = "rayon" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" -dependencies = [ - "either", - "rayon-core", -] - -[[package]] -name = "rayon-core" -version = "1.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" -dependencies = [ - "crossbeam-deque", - "crossbeam-utils", -] - -[[package]] -name = "redox_syscall" -version = "0.5.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" -dependencies = [ - "bitflags", -] - -[[package]] -name = "rustc-hash" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" - -[[package]] -name = "rustc_version" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" -dependencies = [ - "semver", -] - -[[package]] -name = "rustversion" -version = "1.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" - -[[package]] -name = "ryu" -version = "1.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" - -[[package]] -name = "scopeguard" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" - -[[package]] -name = "semver" -version = "1.0.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cb6eb87a131f756572d7fb904f6e7b68633f09cca868c5df1c4b8d1a694bbba" - -[[package]] -name = "serde" -version = "1.0.216" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b9781016e935a97e8beecf0c933758c97a5520d32930e460142b4cd80c6338e" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_bytes" -version = "0.11.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "387cc504cb06bb40a96c8e04e951fe01854cf6bc921053c954e4a606d9675c6a" -dependencies = [ - "serde", -] - -[[package]] -name = "serde_derive" -version = "1.0.216" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "serde_json" -version = "1.0.134" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d00f4175c42ee48b15416f6193a959ba3a0d67fc699a0db9ad12df9f83991c7d" -dependencies = [ - "itoa", - "memchr", - "ryu", - "serde", -] - -[[package]] -name = "sha2" -version = "0.9.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" -dependencies = [ - "block-buffer 0.9.0", - "cfg-if", - "cpufeatures", - "digest 0.9.0", - "opaque-debug", -] - -[[package]] -name = "sha2" -version = "0.10.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest 0.10.7", -] - 
-[[package]] -name = "sha3" -version = "0.10.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" -dependencies = [ - "digest 0.10.7", - "keccak", -] - -[[package]] -name = "shlex" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" - -[[package]] -name = "sized-chunks" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e" -dependencies = [ - "bitmaps", - "typenum", -] - -[[package]] -name = "smallvec" -version = "1.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" - -[[package]] -name = "solana-frozen-abi" -version = "1.18.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20a6ef2db80dceb124b7bf81cca3300804bf427d2711973fc3df450ed7dfb26d" -dependencies = [ - "block-buffer 0.10.4", - "bs58 0.4.0", - "bv", - "either", - "generic-array", - "im", - "lazy_static", - "log", - "memmap2", - "rustc_version", - "serde", - "serde_bytes", - "serde_derive", - "sha2 0.10.8", - "solana-frozen-abi-macro", - "subtle", - "thiserror", -] - -[[package]] -name = "solana-frozen-abi-macro" -version = "1.18.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70088de7d4067d19a7455609e2b393e6086bd847bb39c4d2bf234fc14827ef9e" -dependencies = [ - "proc-macro2", - "quote", - "rustc_version", - "syn 2.0.90", -] - -[[package]] -name = "solana-program" -version = "1.18.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb2b2c8babfae4cace1a25b6efa00418f3acd852cf55d7cecc0360d3c5050479" -dependencies = [ - "ark-bn254", - "ark-ec", - "ark-ff", - "ark-serialize", - "base64 0.21.7", - "bincode", - "bitflags", - "blake3", - "borsh 0.10.4", - "borsh 0.9.3", - "borsh 1.5.3", - "bs58 0.4.0", - "bv", - "bytemuck", - "cc", - "console_error_panic_hook", - "console_log", - "curve25519-dalek", - "getrandom 0.2.15", - "itertools", - "js-sys", - "lazy_static", - "libc", - "libsecp256k1", - "light-poseidon", - "log", - "memoffset", - "num-bigint", - "num-derive", - "num-traits", - "parking_lot", - "rand 0.8.5", - "rustc_version", - "rustversion", - "serde", - "serde_bytes", - "serde_derive", - "serde_json", - "sha2 0.10.8", - "sha3", - "solana-frozen-abi", - "solana-frozen-abi-macro", - "solana-sdk-macro", - "thiserror", - "tiny-bip39", - "wasm-bindgen", - "zeroize", -] - -[[package]] -name = "solana-sdk-macro" -version = "1.18.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c55c196c8050834c391a34b58e3c9fd86b15452ef1feeeafa1dbeb9d2291dfec" -dependencies = [ - "bs58 0.4.0", - "proc-macro2", - "quote", - "rustversion", - "syn 2.0.90", -] - -[[package]] -name = "subtle" -version = "2.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" - -[[package]] -name = "syn" -version = "1.0.109" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "syn" -version = "2.0.90" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "thiserror" -version = "1.0.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = "thiserror-impl" -version = "1.0.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "tiny-bip39" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffc59cb9dfc85bb312c3a78fd6aa8a8582e310b0fa885d5bb877f6dcc601839d" -dependencies = [ - "anyhow", - "hmac", - "once_cell", - "pbkdf2", - "rand 0.7.3", - "rustc-hash", - "sha2 0.9.9", - "thiserror", - "unicode-normalization", - "wasm-bindgen", - "zeroize", -] - -[[package]] -name = "tinyvec" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "022db8904dfa342efe721985167e9fcd16c29b226db4397ed752a761cfce81e8" -dependencies = [ - "tinyvec_macros", -] - -[[package]] -name = "tinyvec_macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" - -[[package]] -name = "toml" -version = "0.5.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" -dependencies = [ - "serde", -] - -[[package]] -name = "toml_datetime" -version = "0.6.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" - -[[package]] -name = "toml_edit" -version = "0.22.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" -dependencies = [ - "indexmap", - "toml_datetime", - "winnow", -] - -[[package]] -name = "typenum" -version = "1.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" - -[[package]] -name = "unicode-ident" -version = "1.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" - -[[package]] -name = "unicode-normalization" -version = "0.1.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" -dependencies = [ - "tinyvec", -] - -[[package]] -name = "unicode-segmentation" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" - -[[package]] -name = "version_check" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" - -[[package]] -name = "wasi" -version = "0.9.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" - -[[package]] -name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" - -[[package]] -name = "wasm-bindgen" -version = "0.2.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a474f6281d1d70c17ae7aa6a613c87fce69a127e2624002df63dcb39d6cf6396" -dependencies = [ - "cfg-if", - "once_cell", - "wasm-bindgen-macro", -] - -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f89bb38646b4f81674e8f5c3fb81b562be1fd936d84320f3264486418519c79" -dependencies = [ - "bumpalo", - "log", - "proc-macro2", - "quote", - "syn 2.0.90", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2cc6181fd9a7492eef6fef1f33961e3695e4579b9872a6f7c83aee556666d4fe" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", - "wasm-bindgen-backend", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "943aab3fdaaa029a6e0271b35ea10b72b943135afe9bffca82384098ad0e06a6" - -[[package]] -name = "web-sys" -version = "0.3.76" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04dd7223427d52553d3702c004d3b2fe07c148165faa56313cb00211e31c12bc" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "windows-targets" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" -dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_gnullvm", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" - -[[package]] -name = "windows_i686_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" - -[[package]] -name = "windows_i686_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" - -[[package]] -name = "windows_x86_64_gnullvm" -version = 
"0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" - -[[package]] -name = "winnow" -version = "0.6.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b" -dependencies = [ - "memchr", -] - -[[package]] -name = "zerocopy" -version = "0.7.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" -dependencies = [ - "byteorder", - "zerocopy-derive", -] - -[[package]] -name = "zerocopy-derive" -version = "0.7.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "zeroize" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4756f7db3f7b5574938c3eb1c117038b8e07f95ee6718c0efad4ac21508f1efd" -dependencies = [ - "zeroize_derive", -] - -[[package]] -name = "zeroize_derive" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] diff --git a/indexed/Cargo.toml b/indexed/Cargo.toml deleted file mode 100644 index 7532ea4..0000000 --- a/indexed/Cargo.toml +++ /dev/null @@ -1,35 +0,0 @@ -[package] -name = "light-indexed-merkle-tree" -version = "1.1.0" -description = "Implementation of indexed (and concurrent) Merkle tree in Rust" -repository = "https://github.com/Lightprotocol/light-protocol" -license = "Apache-2.0" -edition = "2021" - -[features] -solana = [ - "light-concurrent-merkle-tree/solana", - "solana-program" -] - -[dependencies] -borsh = { version = "0.10" } -light-bounded-vec = { version = "1.1.0" } -light-hasher = { version = "1.1.0" } -light-concurrent-merkle-tree = { version = "1.1.0" } -light-merkle-tree-reference = { version = "1.1.0" } -light-utils = { version = "1.1.0" } -memoffset = "0.9" -num-bigint = "0.4" -num-traits = "0.2" - -solana-program = { version="=1.18.22", optional = true } -thiserror = "1.0" - -[dev-dependencies] -light-hash-set = { version = "1.1.0" } -thiserror = "1.0" -rand = "0.8" -hex = "0.4" -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" \ No newline at end of file diff --git a/indexed/src/array.rs b/indexed/src/array.rs deleted file mode 100644 index 3ef4c57..0000000 --- a/indexed/src/array.rs +++ /dev/null @@ -1,1255 +0,0 @@ -use std::{cmp::Ordering, fmt::Debug, marker::PhantomData}; - -use crate::{errors::IndexedMerkleTreeError, HIGHEST_ADDRESS_PLUS_ONE}; -use light_concurrent_merkle_tree::{event::RawIndexedElement, light_hasher::Hasher}; -use light_utils::bigint::bigint_to_be_bytes_array; -use num_bigint::BigUint; -use num_traits::Zero; -use num_traits::{CheckedAdd, CheckedSub, ToBytes, Unsigned}; - -#[derive(Clone, Debug, Default)] -pub struct IndexedElement -where - I: CheckedAdd + CheckedSub + Copy + Clone + PartialOrd + ToBytes + TryFrom + Unsigned, - usize: From, -{ - pub index: I, - pub value: BigUint, - pub next_index: I, -} - -impl From> for 
IndexedElement -where - I: CheckedAdd + CheckedSub + Copy + Clone + PartialOrd + ToBytes + TryFrom + Unsigned, - usize: From, -{ - fn from(value: RawIndexedElement) -> Self { - IndexedElement { - index: value.index, - value: BigUint::from_bytes_be(&value.value), - next_index: value.next_index, - } - } -} - -impl PartialEq for IndexedElement -where - I: CheckedAdd + CheckedSub + Copy + Clone + PartialOrd + ToBytes + TryFrom + Unsigned, - usize: From, -{ - fn eq(&self, other: &Self) -> bool { - self.value == other.value - && self.index == other.index - && self.next_index == other.next_index - } -} - -impl Eq for IndexedElement -where - I: CheckedAdd + CheckedSub + Copy + Clone + PartialOrd + ToBytes + TryFrom + Unsigned, - usize: From, -{ -} - -impl PartialOrd for IndexedElement -where - I: CheckedAdd + CheckedSub + Copy + Clone + PartialOrd + ToBytes + TryFrom + Unsigned, - usize: From, -{ - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for IndexedElement -where - I: CheckedAdd + CheckedSub + Copy + Clone + PartialOrd + ToBytes + TryFrom + Unsigned, - usize: From, -{ - fn cmp(&self, other: &Self) -> Ordering { - self.value.cmp(&other.value) - } -} - -impl IndexedElement -where - I: CheckedAdd + CheckedSub + Copy + Clone + PartialOrd + ToBytes + TryFrom + Unsigned, - usize: From, -{ - pub fn index(&self) -> usize { - self.index.into() - } - - pub fn next_index(&self) -> usize { - self.next_index.into() - } - - pub fn hash(&self, next_value: &BigUint) -> Result<[u8; 32], IndexedMerkleTreeError> - where - H: Hasher, - { - println!("self.value: {:?}", self.value); - println!("self.next_index: {:?}", self.next_index()); - println!("next_value: {:?}", next_value); - let hash = H::hashv(&[ - bigint_to_be_bytes_array::<32>(&self.value)?.as_ref(), - self.next_index.to_be_bytes().as_ref(), - bigint_to_be_bytes_array::<32>(next_value)?.as_ref(), - ])?; - - Ok(hash) - } - - pub fn update_from_raw_element(&mut self, raw_element: &RawIndexedElement) { - self.index = raw_element.index; - self.value = BigUint::from_bytes_be(&raw_element.value); - self.next_index = raw_element.next_index; - } -} - -#[derive(Clone, Debug)] -pub struct IndexedElementBundle -where - I: CheckedAdd + CheckedSub + Copy + Clone + PartialOrd + ToBytes + TryFrom + Unsigned, - usize: From, -{ - pub new_low_element: IndexedElement, - pub new_element: IndexedElement, - pub new_element_next_value: BigUint, -} - -#[derive(Clone, Debug)] -pub struct IndexedArray -where - H: Hasher, - I: CheckedAdd + CheckedSub + Copy + Clone + PartialOrd + ToBytes + TryFrom + Unsigned, - usize: From, -{ - pub elements: Vec>, - pub current_node_index: I, - pub highest_element_index: I, - - _hasher: PhantomData, -} - -impl Default for IndexedArray -where - H: Hasher, - I: CheckedAdd + CheckedSub + Copy + Clone + PartialOrd + ToBytes + TryFrom + Unsigned, - usize: From, -{ - fn default() -> Self { - Self { - elements: vec![IndexedElement { - index: I::zero(), - value: BigUint::zero(), - next_index: I::zero(), - }], - current_node_index: I::zero(), - highest_element_index: I::zero(), - _hasher: PhantomData, - } - } -} - -impl IndexedArray -where - H: Hasher, - I: CheckedAdd + CheckedSub + Copy + Clone + PartialOrd + ToBytes + TryFrom + Unsigned, - usize: From, -{ - pub fn get(&self, index: usize) -> Option<&IndexedElement> { - self.elements.get(index) - } - - pub fn len(&self) -> usize { - self.current_node_index.into() - } - - pub fn is_empty(&self) -> bool { - self.current_node_index == I::zero() - } - - pub fn 
iter(&self) -> IndexingArrayIter { - IndexingArrayIter { - indexing_array: self, - front: 0, - back: self.current_node_index.into(), - } - } - - pub fn find_element(&self, value: &BigUint) -> Option<&IndexedElement> { - self.elements[..self.len() + 1] - .iter() - .find(|&node| node.value == *value) - } - - pub fn init(&mut self) -> Result, IndexedMerkleTreeError> { - use num_traits::Num; - let init_value = BigUint::from_str_radix(HIGHEST_ADDRESS_PLUS_ONE, 10) - .map_err(|_| IndexedMerkleTreeError::IntegerOverflow)?; - self.append(&init_value) - } - - /// Returns the index of the low element for the given `value`, which is - /// not yet the part of the array. - /// - /// Low element is the greatest element which still has lower value than - /// the provided one. - /// - /// Low elements are used in non-membership proofs. - pub fn find_low_element_index_for_nonexistent( - &self, - value: &BigUint, - ) -> Result { - // Try to find element whose next element is higher than the provided - // value. - for (i, node) in self.elements.iter().enumerate() { - if node.value == *value { - return Err(IndexedMerkleTreeError::ElementAlreadyExists); - } - if self.elements[node.next_index()].value > *value && node.value < *value { - return i - .try_into() - .map_err(|_| IndexedMerkleTreeError::IntegerOverflow); - } - } - // If no such element was found, it means that our value is going to be - // the greatest in the array. This means that the currently greatest - // element is going to be the low element of our value. - Ok(self.highest_element_index) - } - - /// Returns the: - /// - /// * Low element for the given value. - /// * Next value for that low element. - /// - /// For the given `value`, which is not yet the part of the array. - /// - /// Low element is the greatest element which still has lower value than - /// the provided one. - /// - /// Low elements are used in non-membership proofs. - pub fn find_low_element_for_nonexistent( - &self, - value: &BigUint, - ) -> Result<(IndexedElement, BigUint), IndexedMerkleTreeError> { - let low_element_index = self.find_low_element_index_for_nonexistent(value)?; - let low_element = self.elements[usize::from(low_element_index)].clone(); - Ok(( - low_element.clone(), - self.elements[low_element.next_index()].value.clone(), - )) - } - - /// Returns the index of the low element for the given `value`, which is - /// already the part of the array. - /// - /// Low element is the greatest element which still has lower value than - /// the provided one. - /// - /// Low elements are used in non-membership proofs. - pub fn find_low_element_index_for_existent( - &self, - value: &BigUint, - ) -> Result { - for (i, node) in self.elements[..self.len() + 1].iter().enumerate() { - if self.elements[usize::from(node.next_index)].value == *value { - let i = i - .try_into() - .map_err(|_| IndexedMerkleTreeError::IntegerOverflow)?; - return Ok(i); - } - } - Err(IndexedMerkleTreeError::ElementDoesNotExist) - } - - /// Returns the low element for the given `value`, which is already the - /// part of the array. - /// - /// Low element is the greatest element which still has lower value than - /// the provided one. - /// - /// Low elements are used in non-membership proofs. 
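As a quick, self-contained sketch of the "low element" idea described in the doc comments above (illustrative names `Element` and `find_low`, plain `u64` values rather than `BigUint`, deliberately not this crate's `IndexedArray` API):

```rust
// Minimal illustration of the "low element" idea: each element stores its
// value and the index of the element holding the next-greater value.
#[derive(Debug)]
struct Element {
    value: u64,
    next_index: usize,
}

// The low element of `value` is the greatest stored element whose value is
// still smaller than `value`; it brackets a missing value from below.
fn find_low(elements: &[Element], value: u64) -> Option<usize> {
    elements
        .iter()
        .enumerate()
        .filter(|(_, e)| e.value < value)
        .max_by_key(|(_, e)| e.value)
        .map(|(i, _)| i)
}

fn main() {
    // Set {0, 20, 40} stored as a linked list: 0 -> 20 -> 40 -> (back to 0).
    let elements = vec![
        Element { value: 0, next_index: 2 },
        Element { value: 40, next_index: 0 },
        Element { value: 20, next_index: 1 },
    ];
    // 30 is not in the set; its low element is the one holding 20.
    let low = find_low(&elements, 30).unwrap();
    assert_eq!(elements[low].value, 20);
    // Non-membership of 30 follows from 20 < 30 < 40: the value falls
    // strictly inside the gap guarded by the low element.
    assert!(elements[low].value < 30 && 30 < elements[elements[low].next_index].value);
    println!("low element of 30 sits at index {low}");
}
```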
- pub fn find_low_element_for_existent( - &self, - value: &BigUint, - ) -> Result, IndexedMerkleTreeError> { - let low_element_index = self.find_low_element_index_for_existent(value)?; - let low_element = self.elements[usize::from(low_element_index)].clone(); - Ok(low_element) - } - - /// Returns the hash of the given element. That hash consists of: - /// - /// * The value of the given element. - /// * The `next_index` of the given element. - /// * The value of the element pointed by `next_index`. - pub fn hash_element(&self, index: I) -> Result<[u8; 32], IndexedMerkleTreeError> { - let element = self - .elements - .get(usize::from(index)) - .ok_or(IndexedMerkleTreeError::IndexHigherThanMax)?; - let next_element = self - .elements - .get(usize::from(element.next_index)) - .ok_or(IndexedMerkleTreeError::IndexHigherThanMax)?; - element.hash::(&next_element.value) - } - - /// Returns an updated low element and a new element, created based on the - /// provided `low_element_index` and `value`. - pub fn new_element_with_low_element_index( - &self, - low_element_index: I, - value: &BigUint, - ) -> Result, IndexedMerkleTreeError> { - let mut new_low_element = self.elements[usize::from(low_element_index)].clone(); - - let new_element_index = self - .current_node_index - .checked_add(&I::one()) - .ok_or(IndexedMerkleTreeError::IntegerOverflow)?; - let new_element = IndexedElement { - index: new_element_index, - value: value.clone(), - next_index: new_low_element.next_index, - }; - - new_low_element.next_index = new_element_index; - - let new_element_next_value = self.elements[usize::from(new_element.next_index)] - .value - .clone(); - - Ok(IndexedElementBundle { - new_low_element, - new_element, - new_element_next_value, - }) - } - - pub fn new_element( - &self, - value: &BigUint, - ) -> Result, IndexedMerkleTreeError> { - let low_element_index = self.find_low_element_index_for_nonexistent(value)?; - let element = self.new_element_with_low_element_index(low_element_index, value)?; - - Ok(element) - } - - /// Appends the given `value` to the indexing array. - pub fn append_with_low_element_index( - &mut self, - low_element_index: I, - value: &BigUint, - ) -> Result, IndexedMerkleTreeError> { - // TOD0: add length check, and add field to with tree height here - - let old_low_element = &self.elements[usize::from(low_element_index)]; - - // Check that the `value` belongs to the range of `old_low_element`. - if old_low_element.next_index == I::zero() { - // In this case, the `old_low_element` is the greatest element. - // The value of `new_element` needs to be greater than the value of - // `old_low_element` (and therefore, be the greatest). - if value <= &old_low_element.value { - return Err(IndexedMerkleTreeError::LowElementGreaterOrEqualToNewElement); - } - } else { - // The value of `new_element` needs to be greater than the value of - // `old_low_element` (and therefore, be the greatest). - if value <= &old_low_element.value { - return Err(IndexedMerkleTreeError::LowElementGreaterOrEqualToNewElement); - } - // The value of `new_element` needs to be lower than the value of - // next element pointed by `old_low_element`. - if value >= &self.elements[usize::from(old_low_element.next_index)].value { - return Err(IndexedMerkleTreeError::NewElementGreaterOrEqualToNextElement); - } - } - - // Create new node. 
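// (At this point the range check above has established that
// `old_low_element.value < *value`, and, unless the low element is currently
// the greatest one, that `*value` is also below the value of the element it
// points to, so the new node can be spliced into the gap without breaking
// the ascending order of the list.)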
- let new_element_bundle = - self.new_element_with_low_element_index(low_element_index, value)?; - - // If the old low element wasn't pointing to any element, it means that: - // - // * It used to be the highest element. - // * Our new element, which we are appending, is going the be the - // highest element. - // - // Therefore, we need to save the new element index as the highest - // index. - if old_low_element.next_index == I::zero() { - self.highest_element_index = new_element_bundle.new_element.index; - } - - // Insert new node. - self.current_node_index = new_element_bundle.new_element.index; - self.elements.push(new_element_bundle.new_element.clone()); - - // Update low element. - self.elements[usize::from(low_element_index)] = new_element_bundle.new_low_element.clone(); - - Ok(new_element_bundle) - } - - pub fn append( - &mut self, - value: &BigUint, - ) -> Result, IndexedMerkleTreeError> { - let low_element_index = self.find_low_element_index_for_nonexistent(value)?; - self.append_with_low_element_index(low_element_index, value) - } - - pub fn lowest(&self) -> Option> { - if self.current_node_index < I::one() { - None - } else { - self.elements.get(1).cloned() - } - } -} - -pub struct IndexingArrayIter<'a, H, I> -where - H: Hasher, - I: CheckedAdd + CheckedSub + Copy + Clone + PartialOrd + ToBytes + TryFrom + Unsigned, - usize: From, -{ - indexing_array: &'a IndexedArray, - front: usize, - back: usize, -} - -impl<'a, H, I> Iterator for IndexingArrayIter<'a, H, I> -where - H: Hasher, - I: CheckedAdd + CheckedSub + Copy + Clone + PartialOrd + ToBytes + TryFrom + Unsigned, - usize: From, -{ - type Item = &'a IndexedElement; - - fn next(&mut self) -> Option { - if self.front <= self.back { - let result = self.indexing_array.elements.get(self.front); - self.front += 1; - result - } else { - None - } - } -} - -impl DoubleEndedIterator for IndexingArrayIter<'_, H, I> -where - H: Hasher, - I: CheckedAdd + CheckedSub + Copy + Clone + PartialOrd + ToBytes + TryFrom + Unsigned, - usize: From, -{ - fn next_back(&mut self) -> Option { - if self.back >= self.front { - let result = self.indexing_array.elements.get(self.back); - self.back -= 1; - result - } else { - None - } - } -} - -#[cfg(test)] -mod test { - use light_concurrent_merkle_tree::light_hasher::Poseidon; - use num_bigint::{RandBigInt, ToBigUint}; - use rand::thread_rng; - - use super::*; - - #[test] - fn test_indexed_element_cmp() { - let mut rng = thread_rng(); - - for _ in 0..1000 { - let value = rng.gen_biguint(128); - let element_1 = IndexedElement:: { - index: 0, - value: value.clone(), - next_index: 1, - }; - let element_2 = IndexedElement:: { - index: 0, - value, - next_index: 1, - }; - assert_eq!(element_1, element_2); - assert_eq!(element_2, element_1); - assert!(matches!(element_1.cmp(&element_2), Ordering::Equal)); - assert!(matches!(element_2.cmp(&element_1), Ordering::Equal)); - - let value_higher = rng.gen_biguint(128); - if value_higher == 0.to_biguint().unwrap() { - continue; - } - let value_lower = rng.gen_biguint_below(&value_higher); - let element_lower = IndexedElement:: { - index: 0, - value: value_lower, - next_index: 1, - }; - let element_higher = IndexedElement:: { - index: 1, - value: value_higher, - next_index: 2, - }; - assert_ne!(element_lower, element_higher); - assert_ne!(element_higher, element_lower); - assert!(matches!(element_lower.cmp(&element_higher), Ordering::Less)); - assert!(matches!( - element_higher.cmp(&element_lower), - Ordering::Greater - )); - assert!(matches!( - 
element_lower.partial_cmp(&element_higher), - Some(Ordering::Less) - )); - assert!(matches!( - element_higher.partial_cmp(&element_lower), - Some(Ordering::Greater) - )); - } - } - - /// Tests the insertion of elements to the indexing array. - #[test] - fn test_append() { - // The initial state of the array looks like: - // - // ``` - // value = [0] [0] [0] [0] [0] [0] [0] [0] - // next_index = [0] [0] [0] [0] [0] [0] [0] [0] - // ``` - let mut indexed_array: IndexedArray = IndexedArray::default(); - - let nullifier1 = 30_u32.to_biguint().unwrap(); - let bundle1 = indexed_array.new_element(&nullifier1).unwrap(); - assert!(indexed_array.find_element(&nullifier1).is_none()); - indexed_array.append(&nullifier1).unwrap(); - - // After adding a new value 30, it should look like: - // - // ``` - // value = [ 0] [30] [0] [0] [0] [0] [0] [0] - // next_index = [ 1] [ 0] [0] [0] [0] [0] [0] [0] - // ``` - // - // Because: - // - // * Low element is the first node, with index 0 and value 0. There is - // no node with value greater as 30, so we found it as a one pointing to - // node 0 (which will always have value 0). - // * The new nullifier is inserted in index 1. - // * `next_*` fields of the low nullifier are updated to point to the new - // nullifier. - assert_eq!( - indexed_array.find_element(&nullifier1), - Some(&bundle1.new_element), - ); - let expected_hash = Poseidon::hashv(&[ - bigint_to_be_bytes_array::<32>(&nullifier1) - .unwrap() - .as_ref(), - 0_usize.to_be_bytes().as_ref(), - bigint_to_be_bytes_array::<32>(&(0.to_biguint().unwrap())) - .unwrap() - .as_ref(), - ]) - .unwrap(); - assert_eq!(indexed_array.hash_element(1).unwrap(), expected_hash); - assert_eq!( - indexed_array.elements[0], - IndexedElement { - index: 0, - value: 0_u32.to_biguint().unwrap(), - next_index: 1, - }, - ); - assert_eq!( - indexed_array.elements[1], - IndexedElement { - index: 1, - value: 30_u32.to_biguint().unwrap(), - next_index: 0, - } - ); - assert_eq!( - indexed_array.iter().collect::>().as_slice(), - &[ - &IndexedElement { - index: 0, - value: 0_u32.to_biguint().unwrap(), - next_index: 1, - }, - &IndexedElement { - index: 1, - value: 30_u32.to_biguint().unwrap(), - next_index: 0 - } - ] - ); - - let nullifier2 = 10_u32.to_biguint().unwrap(); - let bundle2 = indexed_array.new_element(&nullifier2).unwrap(); - assert!(indexed_array.find_element(&nullifier2).is_none()); - indexed_array.append(&nullifier2).unwrap(); - - // After adding an another value 10, it should look like: - // - // ``` - // value = [ 0] [30] [10] [0] [0] [0] [0] [0] - // next_index = [ 2] [ 0] [ 1] [0] [0] [0] [0] [0] - // ``` - // - // Because: - // - // * Low nullifier is still the node 0, but this time for differen reason - - // its `next_index` 2 contains value 30, whish is greater than 10. - // * The new nullifier is inserted as node 2. - // * Low nullifier is pointing to the index 1. We assign the 1st nullifier - // as the next nullifier of our new nullifier. Therefore, our new nullifier - // looks like: `[value = 10, next_index = 1]`. - // * Low nullifier is updated to point to the new nullifier. Therefore, - // after update it looks like: `[value = 0, next_index = 2]`. - // * The previously inserted nullifier, the node 1, remains unchanged. 
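// (General invariant after every append: starting from element 0 and
// following `next_index` visits the stored values in ascending order, and
// the greatest element points back to index 0.)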
- assert_eq!( - indexed_array.find_element(&nullifier2), - Some(&bundle2.new_element), - ); - let expected_hash = Poseidon::hashv(&[ - bigint_to_be_bytes_array::<32>(&nullifier2) - .unwrap() - .as_ref(), - 1_usize.to_be_bytes().as_ref(), - bigint_to_be_bytes_array::<32>(&(30.to_biguint().unwrap())) - .unwrap() - .as_ref(), - ]) - .unwrap(); - assert_eq!(indexed_array.hash_element(2).unwrap(), expected_hash); - assert_eq!( - indexed_array.elements[0], - IndexedElement { - index: 0, - value: 0_u32.to_biguint().unwrap(), - next_index: 2, - } - ); - assert_eq!( - indexed_array.elements[1], - IndexedElement { - index: 1, - value: 30_u32.to_biguint().unwrap(), - next_index: 0, - } - ); - assert_eq!( - indexed_array.elements[2], - IndexedElement { - index: 2, - value: 10_u32.to_biguint().unwrap(), - next_index: 1, - } - ); - assert_eq!( - indexed_array.iter().collect::>().as_slice(), - &[ - &IndexedElement { - index: 0, - value: 0_u32.to_biguint().unwrap(), - next_index: 2, - }, - &IndexedElement { - index: 1, - value: 30_u32.to_biguint().unwrap(), - next_index: 0, - }, - &IndexedElement { - index: 2, - value: 10_u32.to_biguint().unwrap(), - next_index: 1, - } - ] - ); - - let nullifier3 = 20_u32.to_biguint().unwrap(); - let bundle3 = indexed_array.new_element(&nullifier3).unwrap(); - assert!(indexed_array.find_element(&nullifier3).is_none()); - indexed_array.append(&nullifier3).unwrap(); - - // After adding an another value 20, it should look like: - // - // ``` - // value = [ 0] [30] [10] [20] [0] [0] [0] [0] - // next_index = [ 2] [ 0] [ 3] [ 1] [0] [0] [0] [0] - // ``` - // - // Because: - // * Low nullifier is the node 2. - // * The new nullifier is inserted as node 3. - // * Low nullifier is pointing to the node 2. We assign the 1st nullifier - // as the next nullifier of our new nullifier. Therefore, our new - // nullifier looks like: - // * Low nullifier is updated to point to the new nullifier. Therefore, - // after update it looks like: `[value = 10, next_index = 3]`. 
- assert_eq!( - indexed_array.find_element(&nullifier3), - Some(&bundle3.new_element), - ); - let expected_hash = Poseidon::hashv(&[ - bigint_to_be_bytes_array::<32>(&nullifier3) - .unwrap() - .as_ref(), - 1_usize.to_be_bytes().as_ref(), - bigint_to_be_bytes_array::<32>(&(30.to_biguint().unwrap())) - .unwrap() - .as_ref(), - ]) - .unwrap(); - assert_eq!(indexed_array.hash_element(3).unwrap(), expected_hash); - assert_eq!( - indexed_array.elements[0], - IndexedElement { - index: 0, - value: 0_u32.to_biguint().unwrap(), - next_index: 2, - } - ); - assert_eq!( - indexed_array.elements[1], - IndexedElement { - index: 1, - value: 30_u32.to_biguint().unwrap(), - next_index: 0, - } - ); - assert_eq!( - indexed_array.elements[2], - IndexedElement { - index: 2, - value: 10_u32.to_biguint().unwrap(), - next_index: 3, - } - ); - assert_eq!( - indexed_array.elements[3], - IndexedElement { - index: 3, - value: 20_u32.to_biguint().unwrap(), - next_index: 1, - } - ); - assert_eq!( - indexed_array.iter().collect::>().as_slice(), - &[ - &IndexedElement { - index: 0, - value: 0_u32.to_biguint().unwrap(), - next_index: 2, - }, - &IndexedElement { - index: 1, - value: 30_u32.to_biguint().unwrap(), - next_index: 0, - }, - &IndexedElement { - index: 2, - value: 10_u32.to_biguint().unwrap(), - next_index: 3, - }, - &IndexedElement { - index: 3, - value: 20_u32.to_biguint().unwrap(), - next_index: 1 - } - ] - ); - - let nullifier4 = 50_u32.to_biguint().unwrap(); - let bundle4 = indexed_array.new_element(&nullifier4).unwrap(); - assert!(indexed_array.find_element(&nullifier4).is_none()); - indexed_array.append(&nullifier4).unwrap(); - - // After adding an another value 50, it should look like: - // - // ``` - // value = [ 0] [30] [10] [20] [50] [0] [0] [0] - // next_index = [ 2] [ 4] [ 3] [ 1] [0 ] [0] [0] [0] - // ``` - // - // Because: - // - // * Low nullifier is the node 1 - there is no node with value greater - // than 50, so we found it as a one having 0 as the `next_value`. - // * The new nullifier is inserted as node 4. - // * Low nullifier is not pointing to any node. So our new nullifier - // is not going to point to any other node either. Therefore, the new - // nullifier looks like: `[value = 50, next_index = 0]`. - // * Low nullifier is updated to point to the new nullifier. Therefore, - // after update it looks like: `[value = 30, next_index = 4]`. 
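The value/next_index tables in the comments above can be reproduced with a few lines of standalone Rust (hypothetical `append` helper, `u64` values, two parallel `Vec`s instead of `IndexedElement`s); this only illustrates the linked-list bookkeeping, not the hashing or proof logic:

```rust
// Each slot mirrors one column of the diagrams above: a value plus the index
// of the element holding the next-greater value (0 marks "I am the greatest").
fn append(values: &mut Vec<u64>, next: &mut Vec<usize>, value: u64) {
    // Low element: greatest existing value that is still below `value`.
    let low = (0..values.len())
        .filter(|&i| values[i] < value)
        .max_by_key(|&i| values[i])
        .expect("value must be positive");
    let new_index = values.len();
    values.push(value);
    next.push(next[low]); // new element inherits the low element's successor
    next[low] = new_index; // low element now points at the new element
}

fn main() {
    let (mut values, mut next) = (vec![0u64], vec![0usize]);
    for v in [30, 10, 20, 50] {
        append(&mut values, &mut next, v);
    }
    // Matches the final diagram in the test above.
    assert_eq!(values, vec![0, 30, 10, 20, 50]);
    assert_eq!(next, vec![2, 4, 3, 1, 0]);
}
```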
- assert_eq!( - indexed_array.find_element(&nullifier4), - Some(&bundle4.new_element), - ); - let expected_hash = Poseidon::hashv(&[ - bigint_to_be_bytes_array::<32>(&nullifier4) - .unwrap() - .as_ref(), - 0_usize.to_be_bytes().as_ref(), - bigint_to_be_bytes_array::<32>(&(0.to_biguint().unwrap())) - .unwrap() - .as_ref(), - ]) - .unwrap(); - assert_eq!(indexed_array.hash_element(4).unwrap(), expected_hash); - assert_eq!( - indexed_array.elements[0], - IndexedElement { - index: 0, - value: 0_u32.to_biguint().unwrap(), - next_index: 2, - } - ); - assert_eq!( - indexed_array.elements[1], - IndexedElement { - index: 1, - value: 30_u32.to_biguint().unwrap(), - next_index: 4, - } - ); - assert_eq!( - indexed_array.elements[2], - IndexedElement { - index: 2, - value: 10_u32.to_biguint().unwrap(), - next_index: 3, - } - ); - assert_eq!( - indexed_array.elements[3], - IndexedElement { - index: 3, - value: 20_u32.to_biguint().unwrap(), - next_index: 1, - } - ); - assert_eq!( - indexed_array.elements[4], - IndexedElement { - index: 4, - value: 50_u32.to_biguint().unwrap(), - next_index: 0, - } - ); - assert_eq!( - indexed_array.iter().collect::>().as_slice(), - &[ - &IndexedElement { - index: 0, - value: 0_u32.to_biguint().unwrap(), - next_index: 2, - }, - &IndexedElement { - index: 1, - value: 30_u32.to_biguint().unwrap(), - next_index: 4, - }, - &IndexedElement { - index: 2, - value: 10_u32.to_biguint().unwrap(), - next_index: 3, - }, - &IndexedElement { - index: 3, - value: 20_u32.to_biguint().unwrap(), - next_index: 1, - }, - &IndexedElement { - index: 4, - value: 50_u32.to_biguint().unwrap(), - next_index: 0, - } - ] - ); - } - - #[test] - fn test_append_with_low_element_index() { - // The initial state of the array looks like: - // - // ``` - // value = [0] [0] [0] [0] [0] [0] [0] [0] - // next_index = [0] [0] [0] [0] [0] [0] [0] [0] - // ``` - let mut indexing_array: IndexedArray = IndexedArray::default(); - - let low_element_index = 0; - let nullifier1 = 30_u32.to_biguint().unwrap(); - indexing_array - .append_with_low_element_index(low_element_index, &nullifier1) - .unwrap(); - - // After adding a new value 30, it should look like: - // - // ``` - // value = [ 0] [30] [0] [0] [0] [0] [0] [0] - // next_index = [ 1] [ 0] [0] [0] [0] [0] [0] [0] - // ``` - // - // Because: - // - // * Low element is the first node, with index 0 and value 0. There is - // no node with value greater as 30, so we found it as a one pointing to - // node 0 (which will always have value 0). - // * The new nullifier is inserted in index 1. - // * `next_*` fields of the low nullifier are updated to point to the new - // nullifier. - assert_eq!( - indexing_array.elements[0], - IndexedElement { - index: 0, - value: 0_u32.to_biguint().unwrap(), - next_index: 1, - }, - ); - assert_eq!( - indexing_array.elements[1], - IndexedElement { - index: 1, - value: 30_u32.to_biguint().unwrap(), - next_index: 0, - } - ); - - let low_element_index = 0; - let nullifier2 = 10_u32.to_biguint().unwrap(); - indexing_array - .append_with_low_element_index(low_element_index, &nullifier2) - .unwrap(); - - // After adding an another value 10, it should look like: - // - // ``` - // value = [ 0] [30] [10] [0] [0] [0] [0] [0] - // next_index = [ 2] [ 0] [ 1] [0] [0] [0] [0] [0] - // ``` - // - // Because: - // - // * Low nullifier is still the node 0, but this time for differen reason - - // its `next_index` 2 contains value 30, whish is greater than 10. - // * The new nullifier is inserted as node 2. 
- // * Low nullifier is pointing to the index 1. We assign the 1st nullifier - // as the next nullifier of our new nullifier. Therefore, our new nullifier - // looks like: `[value = 10, next_index = 1]`. - // * Low nullifier is updated to point to the new nullifier. Therefore, - // after update it looks like: `[value = 0, next_index = 2]`. - // * The previously inserted nullifier, the node 1, remains unchanged. - assert_eq!( - indexing_array.elements[0], - IndexedElement { - index: 0, - value: 0_u32.to_biguint().unwrap(), - next_index: 2, - } - ); - assert_eq!( - indexing_array.elements[1], - IndexedElement { - index: 1, - value: 30_u32.to_biguint().unwrap(), - next_index: 0, - } - ); - assert_eq!( - indexing_array.elements[2], - IndexedElement { - index: 2, - value: 10_u32.to_biguint().unwrap(), - next_index: 1, - } - ); - - let low_element_index = 2; - let nullifier3 = 20_u32.to_biguint().unwrap(); - indexing_array - .append_with_low_element_index(low_element_index, &nullifier3) - .unwrap(); - - // After adding an another value 20, it should look like: - // - // ``` - // value = [ 0] [30] [10] [20] [0] [0] [0] [0] - // next_index = [ 2] [ 0] [ 3] [ 1] [0] [0] [0] [0] - // ``` - // - // Because: - // * Low nullifier is the node 2. - // * The new nullifier is inserted as node 3. - // * Low nullifier is pointing to the node 2. We assign the 1st nullifier - // as the next nullifier of our new nullifier. Therefore, our new - // nullifier looks like: - // * Low nullifier is updated to point to the new nullifier. Therefore, - // after update it looks like: `[value = 10, next_index = 3]`. - assert_eq!( - indexing_array.elements[0], - IndexedElement { - index: 0, - value: 0_u32.to_biguint().unwrap(), - next_index: 2, - } - ); - assert_eq!( - indexing_array.elements[1], - IndexedElement { - index: 1, - value: 30_u32.to_biguint().unwrap(), - next_index: 0, - } - ); - assert_eq!( - indexing_array.elements[2], - IndexedElement { - index: 2, - value: 10_u32.to_biguint().unwrap(), - next_index: 3, - } - ); - assert_eq!( - indexing_array.elements[3], - IndexedElement { - index: 3, - value: 20_u32.to_biguint().unwrap(), - next_index: 1, - } - ); - - let low_element_index = 1; - let nullifier4 = 50_u32.to_biguint().unwrap(); - indexing_array - .append_with_low_element_index(low_element_index, &nullifier4) - .unwrap(); - - // After adding an another value 50, it should look like: - // - // ``` - // value = [ 0] [30] [10] [20] [50] [0] [0] [0] - // next_index = [ 2] [ 4] [ 3] [ 1] [0 ] [0] [0] [0] - // ``` - // - // Because: - // - // * Low nullifier is the node 1 - there is no node with value greater - // than 50, so we found it as a one having 0 as the `next_value`. - // * The new nullifier is inserted as node 4. - // * Low nullifier is not pointing to any node. So our new nullifier - // is not going to point to any other node either. Therefore, the new - // nullifier looks like: `[value = 50, next_index = 0]`. - // * Low nullifier is updated to point to the new nullifier. Therefore, - // after update it looks like: `[value = 30, next_index = 4]`. 
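// (Unlike `append`, `append_with_low_element_index` trusts the caller to
// pick the low element; the only remaining protection is the range check
// inside the method, which the next test exercises.)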
- assert_eq!( - indexing_array.elements[0], - IndexedElement { - index: 0, - value: 0_u32.to_biguint().unwrap(), - next_index: 2, - } - ); - assert_eq!( - indexing_array.elements[1], - IndexedElement { - index: 1, - value: 30_u32.to_biguint().unwrap(), - next_index: 4, - } - ); - assert_eq!( - indexing_array.elements[2], - IndexedElement { - index: 2, - value: 10_u32.to_biguint().unwrap(), - next_index: 3, - } - ); - assert_eq!( - indexing_array.elements[3], - IndexedElement { - index: 3, - value: 20_u32.to_biguint().unwrap(), - next_index: 1, - } - ); - assert_eq!( - indexing_array.elements[4], - IndexedElement { - index: 4, - value: 50_u32.to_biguint().unwrap(), - next_index: 0, - } - ); - } - - /// Tries to violate the integrity of the array by pointing to invalid low - /// nullifiers. Tests whether the range check works correctly and disallows - /// the invalid appends from happening. - #[test] - fn test_append_with_low_element_index_invalid() { - // The initial state of the array looks like: - // - // ``` - // value = [0] [0] [0] [0] [0] [0] [0] [0] - // next_index = [0] [0] [0] [0] [0] [0] [0] [0] - // ``` - let mut indexing_array: IndexedArray = IndexedArray::default(); - - // Append nullifier 30. The low nullifier is at index 0. The array - // should look like: - // - // ``` - // value = [ 0] [30] [0] [0] [0] [0] [0] [0] - // next_index = [ 1] [ 0] [0] [0] [0] [0] [0] [0] - // ``` - let low_element_index = 0; - let nullifier1 = 30_u32.to_biguint().unwrap(); - indexing_array - .append_with_low_element_index(low_element_index, &nullifier1) - .unwrap(); - - // Try appending nullifier 20, while pointing to index 1 as low - // nullifier. - // Therefore, the new element is lower than the supposed low element. - let low_element_index = 1; - let nullifier2 = 20_u32.to_biguint().unwrap(); - assert!(matches!( - indexing_array.append_with_low_element_index(low_element_index, &nullifier2), - Err(IndexedMerkleTreeError::LowElementGreaterOrEqualToNewElement) - )); - - // Try appending nullifier 50, while pointing to index 0 as low - // nullifier. - // Therefore, the new element is greater than next element. - let low_element_index = 0; - let nullifier2 = 50_u32.to_biguint().unwrap(); - assert!(matches!( - indexing_array.append_with_low_element_index(low_element_index, &nullifier2), - Err(IndexedMerkleTreeError::NewElementGreaterOrEqualToNextElement), - )); - - // Append nullifier 50 correctly, with 0 as low nullifier. The array - // should look like: - // - // ``` - // value = [ 0] [30] [50] [0] [0] [0] [0] [0] - // next_index = [ 1] [ 2] [ 0] [0] [0] [0] [0] [0] - // ``` - let low_element_index = 1; - let nullifier2 = 50_u32.to_biguint().unwrap(); - indexing_array - .append_with_low_element_index(low_element_index, &nullifier2) - .unwrap(); - - // Try appending nullifier 40, while pointint to index 2 (value 50) as - // low nullifier. - // Therefore, the pointed low element is greater than the new element. - let low_element_index = 2; - let nullifier3 = 40_u32.to_biguint().unwrap(); - assert!(matches!( - indexing_array.append_with_low_element_index(low_element_index, &nullifier3), - Err(IndexedMerkleTreeError::LowElementGreaterOrEqualToNewElement) - )); - } - - /// Tests whether `find_*_for_existent` elements return `None` when a - /// nonexistent is provided. - #[test] - fn test_find_low_element_for_existent_element() { - let mut indexed_array: IndexedArray = IndexedArray::default(); - - // Append nullifiers 40 and 20. 
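// (After these two appends the list reads 0 -> 20 -> 40, i.e. elements
// [index 0: value 0, next 2], [index 1: value 40, next 0],
// [index 2: value 20, next 1]; so any value in the open range (20, 40),
// such as 30 below, has the element holding 20 as its low element.)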
- let low_element_index = 0; - let nullifier_1 = 40_u32.to_biguint().unwrap(); - indexed_array - .append_with_low_element_index(low_element_index, &nullifier_1) - .unwrap(); - let low_element_index = 0; - let nullifier_2 = 20_u32.to_biguint().unwrap(); - indexed_array - .append_with_low_element_index(low_element_index, &nullifier_2) - .unwrap(); - - // Try finding a low element for nonexistent nullifier 30. - let nonexistent_nullifier = 30_u32.to_biguint().unwrap(); - // `*_existent` methods should fail. - let res = indexed_array.find_low_element_index_for_existent(&nonexistent_nullifier); - assert!(matches!( - res, - Err(IndexedMerkleTreeError::ElementDoesNotExist) - )); - let res = indexed_array.find_low_element_for_existent(&nonexistent_nullifier); - assert!(matches!( - res, - Err(IndexedMerkleTreeError::ElementDoesNotExist) - )); - // `*_nonexistent` methods should succeed. - let low_element_index = indexed_array - .find_low_element_index_for_nonexistent(&nonexistent_nullifier) - .unwrap(); - assert_eq!(low_element_index, 2); - let low_element = indexed_array - .find_low_element_for_nonexistent(&nonexistent_nullifier) - .unwrap(); - assert_eq!( - low_element, - ( - IndexedElement:: { - index: 2, - value: 20_u32.to_biguint().unwrap(), - next_index: 1, - }, - 40_u32.to_biguint().unwrap(), - ) - ); - - // Try finding a low element of existent nullifier 40. - // `_existent` methods should succeed. - let low_element_index = indexed_array - .find_low_element_index_for_existent(&nullifier_1) - .unwrap(); - assert_eq!(low_element_index, 2); - let low_element = indexed_array - .find_low_element_for_existent(&nullifier_1) - .unwrap(); - assert_eq!( - low_element, - IndexedElement:: { - index: 2, - value: 20_u32.to_biguint().unwrap(), - next_index: 1, - }, - ); - // `*_nonexistent` methods should fail. - let res = indexed_array.find_low_element_index_for_nonexistent(&nullifier_1); - assert!(matches!( - res, - Err(IndexedMerkleTreeError::ElementAlreadyExists) - )); - let res = indexed_array.find_low_element_for_nonexistent(&nullifier_1); - assert!(matches!( - res, - Err(IndexedMerkleTreeError::ElementAlreadyExists) - )); - } -} diff --git a/indexed/src/changelog.rs b/indexed/src/changelog.rs deleted file mode 100644 index d2515d4..0000000 --- a/indexed/src/changelog.rs +++ /dev/null @@ -1,16 +0,0 @@ -use light_concurrent_merkle_tree::event::RawIndexedElement; - -/// NET_HEIGHT = HEIGHT - CANOPY_DEPTH -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct IndexedChangelogEntry -where - I: Clone, -{ - /// Element that was a subject to the change. - pub element: RawIndexedElement, - /// Merkle proof of that operation. - pub proof: [[u8; 32]; NET_HEIGHT], - /// Index of a changelog entry in `ConcurrentMerkleTree` corresponding to - /// the same operation. 
- pub changelog_index: usize, -} diff --git a/indexed/src/copy.rs b/indexed/src/copy.rs deleted file mode 100644 index f746533..0000000 --- a/indexed/src/copy.rs +++ /dev/null @@ -1,206 +0,0 @@ -use std::{fmt, marker::PhantomData, ops::Deref}; - -use crate::{errors::IndexedMerkleTreeError, IndexedMerkleTree}; -use light_bounded_vec::CyclicBoundedVecMetadata; -use light_concurrent_merkle_tree::{ - copy::ConcurrentMerkleTreeCopy, errors::ConcurrentMerkleTreeError, -}; -use light_hasher::Hasher; -use light_utils::offset::copy::{read_cyclic_bounded_vec_at, read_value_at}; -use num_traits::{CheckedAdd, CheckedSub, ToBytes, Unsigned}; - -#[derive(Debug)] -pub struct IndexedMerkleTreeCopy( - IndexedMerkleTree, -) -where - H: Hasher, - I: CheckedAdd - + CheckedSub - + Copy - + Clone - + fmt::Debug - + PartialOrd - + ToBytes - + TryFrom - + Unsigned, - usize: From; - -impl - IndexedMerkleTreeCopy -where - H: Hasher, - I: CheckedAdd - + CheckedSub - + Copy - + Clone - + fmt::Debug - + PartialOrd - + ToBytes - + TryFrom - + Unsigned, - usize: From, -{ - /// Casts a byte slice into wrapped `IndexedMerkleTree` structure reference, - /// including dynamic fields. - /// - /// # Purpose - /// - /// This method is meant to be used mostly in Solana programs, where memory - /// constraints are tight and we want to make sure no data is copied. - pub fn from_bytes_copy(bytes: &[u8]) -> Result { - let (merkle_tree, mut offset) = - ConcurrentMerkleTreeCopy::::struct_from_bytes_copy(bytes)?; - - let indexed_changelog_metadata: CyclicBoundedVecMetadata = - unsafe { read_value_at(bytes, &mut offset) }; - - let expected_size = IndexedMerkleTree::::size_in_account( - merkle_tree.height, - merkle_tree.changelog.capacity(), - merkle_tree.roots.capacity(), - merkle_tree.canopy_depth, - indexed_changelog_metadata.capacity(), - ); - - if bytes.len() < expected_size { - return Err(IndexedMerkleTreeError::ConcurrentMerkleTree( - ConcurrentMerkleTreeError::BufferSize(expected_size, bytes.len()), - )); - } - let indexed_changelog = - unsafe { read_cyclic_bounded_vec_at(bytes, &mut offset, &indexed_changelog_metadata) }; - - Ok(Self(IndexedMerkleTree { - merkle_tree, - indexed_changelog, - _index: PhantomData, - })) - } -} - -impl Deref - for IndexedMerkleTreeCopy -where - H: Hasher, - I: CheckedAdd - + CheckedSub - + Copy - + Clone - + fmt::Debug - + PartialOrd - + ToBytes - + TryFrom - + Unsigned, - usize: From, -{ - type Target = IndexedMerkleTree; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -#[cfg(test)] -mod test { - use light_hasher::Poseidon; - use light_utils::bigint::bigint_to_be_bytes_array; - use num_bigint::RandBigInt; - use rand::thread_rng; - - use crate::zero_copy::IndexedMerkleTreeZeroCopyMut; - - use super::*; - - fn from_bytes_copy< - const HEIGHT: usize, - const CHANGELOG_SIZE: usize, - const ROOTS: usize, - const CANOPY_DEPTH: usize, - const INDEXED_CHANGELOG_SIZE: usize, - const OPERATIONS: usize, - const NET_HEIGHT: usize, - >() { - let mut mt_1 = IndexedMerkleTree::::new( - HEIGHT, - CHANGELOG_SIZE, - ROOTS, - CANOPY_DEPTH, - INDEXED_CHANGELOG_SIZE, - ) - .unwrap(); - mt_1.init().unwrap(); - - let mut bytes = vec![ - 0u8; - IndexedMerkleTree::::size_in_account( - HEIGHT, - CHANGELOG_SIZE, - ROOTS, - CANOPY_DEPTH, - INDEXED_CHANGELOG_SIZE - ) - ]; - - { - let mut mt_2 = - IndexedMerkleTreeZeroCopyMut::::from_bytes_zero_copy_init( - &mut bytes, - HEIGHT, - CANOPY_DEPTH, - CHANGELOG_SIZE, - ROOTS, - INDEXED_CHANGELOG_SIZE, - ) - .unwrap(); - mt_2.init().unwrap(); - - assert_eq!(mt_1, *mt_2); - 
} - - let mut rng = thread_rng(); - - for _ in 0..OPERATIONS { - // Reload the tree from bytes on each iteration. - let mut mt_2 = - IndexedMerkleTreeZeroCopyMut::::from_bytes_zero_copy_mut( - &mut bytes, - ) - .unwrap(); - - let leaf: [u8; 32] = bigint_to_be_bytes_array::<32>(&rng.gen_biguint(248)).unwrap(); - mt_1.append(&leaf).unwrap(); - mt_2.append(&leaf).unwrap(); - - assert_eq!(mt_1, *mt_2); - } - - // Read a copy of that Merkle tree. - let mt_2 = - IndexedMerkleTreeCopy::::from_bytes_copy(&bytes) - .unwrap(); - - assert_eq!(mt_1, *mt_2); - } - - #[test] - fn test_from_bytes_copy_26_1400_2400_10_256_1024() { - const HEIGHT: usize = 26; - const CHANGELOG_SIZE: usize = 1400; - const ROOTS: usize = 2400; - const CANOPY_DEPTH: usize = 10; - const INDEXED_CHANGELOG_SIZE: usize = 256; - const NET_HEIGHT: usize = 16; - const OPERATIONS: usize = 1024; - - from_bytes_copy::< - HEIGHT, - CHANGELOG_SIZE, - ROOTS, - CANOPY_DEPTH, - INDEXED_CHANGELOG_SIZE, - OPERATIONS, - NET_HEIGHT, - >() - } -} diff --git a/indexed/src/errors.rs b/indexed/src/errors.rs deleted file mode 100644 index c64b6bb..0000000 --- a/indexed/src/errors.rs +++ /dev/null @@ -1,66 +0,0 @@ -use light_bounded_vec::BoundedVecError; -use light_concurrent_merkle_tree::{ - errors::ConcurrentMerkleTreeError, light_hasher::errors::HasherError, -}; -use light_utils::UtilsError; -use thiserror::Error; - -#[derive(Debug, Error)] -pub enum IndexedMerkleTreeError { - #[error("Integer overflow")] - IntegerOverflow, - #[error("Invalid index, it exceeds the number of elements.")] - IndexHigherThanMax, - #[error("Could not find the low element.")] - LowElementNotFound, - #[error("Low element is greater or equal to the provided new element.")] - LowElementGreaterOrEqualToNewElement, - #[error("The provided new element is greater or equal to the next element.")] - NewElementGreaterOrEqualToNextElement, - #[error("The element already exists, but was expected to be absent.")] - ElementAlreadyExists, - #[error("The element does not exist, but was expected to be present.")] - ElementDoesNotExist, - #[error("Invalid changelog buffer size, expected {0}, got {1}")] - ChangelogBufferSize(usize, usize), - #[error("Hasher error: {0}")] - Hasher(#[from] HasherError), - #[error("Concurrent Merkle tree error: {0}")] - ConcurrentMerkleTree(#[from] ConcurrentMerkleTreeError), - #[error("Utils error {0}")] - Utils(#[from] UtilsError), - #[error("Bounded vector error: {0}")] - BoundedVec(#[from] BoundedVecError), - #[error("Indexed array is full, cannot append more elements")] - ArrayFull, -} - -// NOTE(vadorovsky): Unfortunately, we need to do it by hand. `num_derive::ToPrimitive` -// doesn't support data-carrying enums. 
-#[cfg(feature = "solana")] -impl From for u32 { - fn from(e: IndexedMerkleTreeError) -> u32 { - match e { - IndexedMerkleTreeError::IntegerOverflow => 11001, - IndexedMerkleTreeError::IndexHigherThanMax => 11002, - IndexedMerkleTreeError::LowElementNotFound => 11003, - IndexedMerkleTreeError::LowElementGreaterOrEqualToNewElement => 11004, - IndexedMerkleTreeError::NewElementGreaterOrEqualToNextElement => 11005, - IndexedMerkleTreeError::ElementAlreadyExists => 11006, - IndexedMerkleTreeError::ElementDoesNotExist => 11007, - IndexedMerkleTreeError::ChangelogBufferSize(_, _) => 11008, - IndexedMerkleTreeError::ArrayFull => 11009, - IndexedMerkleTreeError::Hasher(e) => e.into(), - IndexedMerkleTreeError::ConcurrentMerkleTree(e) => e.into(), - IndexedMerkleTreeError::Utils(e) => e.into(), - IndexedMerkleTreeError::BoundedVec(e) => e.into(), - } - } -} - -#[cfg(feature = "solana")] -impl From for solana_program::program_error::ProgramError { - fn from(e: IndexedMerkleTreeError) -> Self { - solana_program::program_error::ProgramError::Custom(e.into()) - } -} diff --git a/indexed/src/lib.rs b/indexed/src/lib.rs deleted file mode 100644 index b404734..0000000 --- a/indexed/src/lib.rs +++ /dev/null @@ -1,530 +0,0 @@ -use std::{ - fmt, - marker::PhantomData, - mem, - ops::{Deref, DerefMut}, -}; - -use array::{IndexedArray, IndexedElement}; -use changelog::IndexedChangelogEntry; -use light_bounded_vec::{BoundedVec, CyclicBoundedVec, CyclicBoundedVecMetadata}; -use light_concurrent_merkle_tree::{ - errors::ConcurrentMerkleTreeError, - event::{IndexedMerkleTreeUpdate, RawIndexedElement}, - light_hasher::Hasher, - ConcurrentMerkleTree, -}; -use light_utils::bigint::bigint_to_be_bytes_array; -use num_bigint::BigUint; -use num_traits::{CheckedAdd, CheckedSub, ToBytes, Unsigned}; - -pub mod array; -pub mod changelog; -pub mod copy; -pub mod errors; -pub mod reference; -pub mod zero_copy; - -use crate::errors::IndexedMerkleTreeError; - -pub const HIGHEST_ADDRESS_PLUS_ONE: &str = - "452312848583266388373324160190187140051835877600158453279131187530910662655"; - -#[derive(Debug)] -#[repr(C)] -pub struct IndexedMerkleTree -where - H: Hasher, - I: CheckedAdd - + CheckedSub - + Copy - + Clone - + fmt::Debug - + PartialOrd - + ToBytes - + TryFrom - + Unsigned, - usize: From, -{ - pub merkle_tree: ConcurrentMerkleTree, - pub indexed_changelog: CyclicBoundedVec>, - - _index: PhantomData, -} - -pub type IndexedMerkleTree26 = IndexedMerkleTree; - -impl IndexedMerkleTree -where - H: Hasher, - I: CheckedAdd - + CheckedSub - + Copy - + Clone - + fmt::Debug - + PartialOrd - + ToBytes - + TryFrom - + Unsigned, - usize: From, -{ - /// Size of the struct **without** dynamically sized fields (`BoundedVec`, - /// `CyclicBoundedVec`). - pub fn non_dyn_fields_size() -> usize { - ConcurrentMerkleTree::::non_dyn_fields_size() - // indexed_changelog (metadata) - + mem::size_of::() - } - - // TODO(vadorovsky): Make a macro for that. 
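// (Byte layout of a tree account, as computed by `size_in_account` below:
// the embedded `ConcurrentMerkleTree` with its changelog, roots and canopy
// comes first, followed by the indexed changelog's `CyclicBoundedVecMetadata`
// and then `indexed_changelog_size` raw `IndexedChangelogEntry` slots.)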
- pub fn size_in_account( - height: usize, - changelog_size: usize, - roots_size: usize, - canopy_depth: usize, - indexed_changelog_size: usize, - ) -> usize { - ConcurrentMerkleTree::::size_in_account( - height, - changelog_size, - roots_size, - canopy_depth, - ) - // indexed_changelog (metadata) - + mem::size_of::() - // indexed_changelog - + mem::size_of::>() * indexed_changelog_size - } - - pub fn new( - height: usize, - changelog_size: usize, - roots_size: usize, - canopy_depth: usize, - indexed_changelog_size: usize, - ) -> Result { - let merkle_tree = ConcurrentMerkleTree::::new( - height, - changelog_size, - roots_size, - canopy_depth, - )?; - Ok(Self { - merkle_tree, - indexed_changelog: CyclicBoundedVec::with_capacity(indexed_changelog_size), - _index: PhantomData, - }) - } - - pub fn init(&mut self) -> Result<(), IndexedMerkleTreeError> { - self.merkle_tree.init()?; - - // Append the first low leaf, which has value 0 and does not point - // to any other leaf yet. - // This low leaf is going to be updated during the first `update` - // operation. - self.merkle_tree.append(&H::zero_indexed_leaf())?; - - // Emit first changelog entries. - let element = RawIndexedElement { - value: [0_u8; 32], - next_index: I::zero(), - next_value: [0_u8; 32], - index: I::zero(), - }; - let changelog_entry = IndexedChangelogEntry { - element, - proof: H::zero_bytes()[..NET_HEIGHT].try_into().unwrap(), - changelog_index: 0, - }; - self.indexed_changelog.push(changelog_entry.clone()); - self.indexed_changelog.push(changelog_entry); - - Ok(()) - } - - /// Add the hightest element with a maximum value allowed by the prime - /// field. - /// - /// Initializing an indexed Merkle tree not only with the lowest element - /// (mandatory for the IMT algorithm to work), but also the highest element, - /// makes non-inclusion proofs easier - there is no special case needed for - /// the first insertion. - /// - /// However, it comes with a tradeoff - the space available in the tree - /// becomes lower by 1. - pub fn add_highest_element(&mut self) -> Result<(), IndexedMerkleTreeError> { - let mut indexed_array = IndexedArray::::default(); - let element_bundle = indexed_array.init()?; - let new_low_leaf = element_bundle - .new_low_element - .hash::(&element_bundle.new_element.value)?; - - let mut proof = BoundedVec::with_capacity(self.merkle_tree.height); - for i in 0..self.merkle_tree.height - self.merkle_tree.canopy_depth { - // PANICS: Calling `unwrap()` pushing into this bounded vec - // cannot panic since it has enough capacity. - proof.push(H::zero_bytes()[i]).unwrap(); - } - - let (changelog_index, _) = self.merkle_tree.update( - self.changelog_index(), - &H::zero_indexed_leaf(), - &new_low_leaf, - 0, - &mut proof, - )?; - - // Emit changelog for low element. 
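// (The entry records the zero element as it looks after this update: still
// value 0, but now pointing at the highest element. Storing it together with
// the proof and the concurrent changelog index lets `patch_elements_and_proof`
// repair proofs of callers that read the tree before this operation landed.)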
- let low_element = RawIndexedElement { - value: bigint_to_be_bytes_array::<32>(&element_bundle.new_low_element.value)?, - next_index: element_bundle.new_low_element.next_index, - next_value: bigint_to_be_bytes_array::<32>(&element_bundle.new_element.value)?, - index: element_bundle.new_low_element.index, - }; - - let low_element_changelog_entry = IndexedChangelogEntry { - element: low_element, - proof: H::zero_bytes()[..NET_HEIGHT].try_into().unwrap(), - changelog_index, - }; - self.indexed_changelog.push(low_element_changelog_entry); - - let new_leaf = element_bundle - .new_element - .hash::(&element_bundle.new_element_next_value)?; - let mut proof = BoundedVec::with_capacity(self.height); - let (changelog_index, _) = self.merkle_tree.append_with_proof(&new_leaf, &mut proof)?; - - // Emit changelog for new element. - let new_element = RawIndexedElement { - value: bigint_to_be_bytes_array::<32>(&element_bundle.new_element.value)?, - next_index: element_bundle.new_element.next_index, - next_value: [0_u8; 32], - index: element_bundle.new_element.index, - }; - let new_element_changelog_entry = IndexedChangelogEntry { - element: new_element, - proof: proof.as_slice()[..NET_HEIGHT].try_into().unwrap(), - changelog_index, - }; - - self.indexed_changelog.push(new_element_changelog_entry); - - Ok(()) - } - - pub fn indexed_changelog_index(&self) -> usize { - self.indexed_changelog.last_index() - } - - /// Checks whether the given Merkle `proof` for the given `node` (with index - /// `i`) is valid. The proof is valid when computing parent node hashes using - /// the whole path of the proof gives the same result as the given `root`. - pub fn validate_proof( - &self, - leaf: &[u8; 32], - leaf_index: usize, - proof: &BoundedVec<[u8; 32]>, - ) -> Result<(), IndexedMerkleTreeError> { - self.merkle_tree.validate_proof(leaf, leaf_index, proof)?; - Ok(()) - } - - /// Iterates over indexed changelog and every time an entry corresponding - /// to the provided `low_element` is found, it patches: - /// - /// * Changelog index - indexed changelog entries contain corresponding - /// changelog indices. - /// * New element - changes might impact the `next_index` field, which in - /// such case is updated. - /// * Low element - it might completely change if a change introduced an - /// element in our range. - /// * Merkle proof. - #[allow(clippy::type_complexity)] - pub fn patch_elements_and_proof( - &mut self, - indexed_changelog_index: usize, - changelog_index: &mut usize, - new_element: &mut IndexedElement, - low_element: &mut IndexedElement, - low_element_next_value: &mut BigUint, - low_leaf_proof: &mut BoundedVec<[u8; 32]>, - ) -> Result<(), IndexedMerkleTreeError> { - let next_indexed_changelog_indices: Vec = self - .indexed_changelog - .iter_from(indexed_changelog_index)? - .skip(1) - .enumerate() - .filter_map(|(index, changelog_entry)| { - if changelog_entry.element.index == low_element.index { - Some((indexed_changelog_index + 1 + index) % self.indexed_changelog.len()) - } else { - None - } - }) - .collect(); - - let mut new_low_element = None; - - for next_indexed_changelog_index in next_indexed_changelog_indices { - let changelog_entry = &mut self.indexed_changelog[next_indexed_changelog_index]; - - let next_element_value = BigUint::from_bytes_be(&changelog_entry.element.next_value); - if next_element_value < new_element.value { - // If the next element is lower than the current element, it means - // that it should become the low element. - // - // Save it and break the loop. 
- new_low_element = Some(( - (next_indexed_changelog_index + 1) % self.indexed_changelog.len(), - next_element_value, - )); - break; - } - - // Patch the changelog index. - *changelog_index = changelog_entry.changelog_index; - - // Patch the `next_index` of `new_element`. - new_element.next_index = changelog_entry.element.next_index; - - // Patch the element. - low_element.update_from_raw_element(&changelog_entry.element); - // Patch the next value. - *low_element_next_value = BigUint::from_bytes_be(&changelog_entry.element.next_value); - // Patch the proof. - for i in 0..low_leaf_proof.len() { - low_leaf_proof[i] = changelog_entry.proof[i]; - } - } - - // If we found a new low element. - if let Some((new_low_element_changelog_index, new_low_element)) = new_low_element { - let new_low_element_changelog_entry = - &self.indexed_changelog[new_low_element_changelog_index]; - *changelog_index = new_low_element_changelog_entry.changelog_index; - *low_element = IndexedElement { - index: new_low_element_changelog_entry.element.index, - value: new_low_element.clone(), - next_index: new_low_element_changelog_entry.element.next_index, - }; - - for i in 0..low_leaf_proof.len() { - low_leaf_proof[i] = new_low_element_changelog_entry.proof[i]; - } - new_element.next_index = low_element.next_index; - - // Start the patching process from scratch for the new low element. - return self.patch_elements_and_proof( - new_low_element_changelog_index, - changelog_index, - new_element, - low_element, - low_element_next_value, - low_leaf_proof, - ); - } - - Ok(()) - } - - pub fn update( - &mut self, - mut changelog_index: usize, - indexed_changelog_index: usize, - new_element_value: BigUint, - mut low_element: IndexedElement, - mut low_element_next_value: BigUint, - low_leaf_proof: &mut BoundedVec<[u8; 32]>, - ) -> Result, IndexedMerkleTreeError> { - let mut new_element = IndexedElement { - index: I::try_from(self.merkle_tree.next_index()) - .map_err(|_| IndexedMerkleTreeError::IntegerOverflow)?, - value: new_element_value, - next_index: low_element.next_index, - }; - println!("low_element: {:?}", low_element); - - self.patch_elements_and_proof( - indexed_changelog_index, - &mut changelog_index, - &mut new_element, - &mut low_element, - &mut low_element_next_value, - low_leaf_proof, - )?; - println!("patched low_element: {:?}", low_element); - // Check that the value of `new_element` belongs to the range - // of `old_low_element`. - if low_element.next_index == I::zero() { - // In this case, the `old_low_element` is the greatest element. - // The value of `new_element` needs to be greater than the value of - // `old_low_element` (and therefore, be the greatest). - if new_element.value <= low_element.value { - return Err(IndexedMerkleTreeError::LowElementGreaterOrEqualToNewElement); - } - } else { - // The value of `new_element` needs to be greater than the value of - // `old_low_element` (and therefore, be the greatest). - if new_element.value <= low_element.value { - return Err(IndexedMerkleTreeError::LowElementGreaterOrEqualToNewElement); - } - // The value of `new_element` needs to be lower than the value of - // next element pointed by `old_low_element`. - if new_element.value >= low_element_next_value { - return Err(IndexedMerkleTreeError::NewElementGreaterOrEqualToNextElement); - } - } - // Instantiate `new_low_element` - the low element with updated values. 
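// (Remaining steps of `update`: rewrite the low leaf so it points at the new
// element, append the new leaf, which hashes the new value together with the
// `next_index` and next value inherited from the old low element, and push
// one indexed-changelog entry for each of the two writes so that concurrent
// callers can patch their stale proofs.)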
- let new_low_element = IndexedElement { - index: low_element.index, - value: low_element.value.clone(), - next_index: new_element.index, - }; - // Update low element. If the `old_low_element` does not belong to the - // tree, validating the proof is going to fail. - let old_low_leaf = low_element.hash::(&low_element_next_value)?; - - let new_low_leaf = new_low_element.hash::(&new_element.value)?; - - let (new_changelog_index, _) = self.merkle_tree.update( - changelog_index, - &old_low_leaf, - &new_low_leaf, - low_element.index.into(), - low_leaf_proof, - )?; - - // Emit changelog entry for low element. - let new_low_element = RawIndexedElement { - value: bigint_to_be_bytes_array::<32>(&new_low_element.value).unwrap(), - next_index: new_low_element.next_index, - next_value: bigint_to_be_bytes_array::<32>(&new_element.value)?, - index: new_low_element.index, - }; - let low_element_changelog_entry = IndexedChangelogEntry { - element: new_low_element, - proof: low_leaf_proof.as_slice()[..NET_HEIGHT].try_into().unwrap(), - changelog_index: new_changelog_index, - }; - - self.indexed_changelog.push(low_element_changelog_entry); - - // New element is always the newest one in the tree. Since we - // support concurrent updates, the index provided by the caller - // might be outdated. Let's just use the latest index indicated - // by the tree. - new_element.index = - I::try_from(self.next_index()).map_err(|_| IndexedMerkleTreeError::IntegerOverflow)?; - - // Append new element. - let mut proof = BoundedVec::with_capacity(self.height); - let new_leaf = new_element.hash::(&low_element_next_value)?; - let (new_changelog_index, _) = self.merkle_tree.append_with_proof(&new_leaf, &mut proof)?; - - // Prepare raw new element to save in changelog. - let raw_new_element = RawIndexedElement { - value: bigint_to_be_bytes_array::<32>(&new_element.value).unwrap(), - next_index: new_element.next_index, - next_value: bigint_to_be_bytes_array::<32>(&low_element_next_value)?, - index: new_element.index, - }; - - // Emit changelog entry for new element. 
- let new_element_changelog_entry = IndexedChangelogEntry { - element: raw_new_element, - proof: proof.as_slice()[..NET_HEIGHT].try_into().unwrap(), - changelog_index: new_changelog_index, - }; - self.indexed_changelog.push(new_element_changelog_entry); - - let output = IndexedMerkleTreeUpdate { - new_low_element, - new_low_element_hash: new_low_leaf, - new_high_element: raw_new_element, - new_high_element_hash: new_leaf, - }; - - Ok(output) - } -} - -impl Deref - for IndexedMerkleTree -where - H: Hasher, - I: CheckedAdd - + CheckedSub - + Copy - + Clone - + fmt::Debug - + PartialOrd - + ToBytes - + TryFrom - + Unsigned, - usize: From, -{ - type Target = ConcurrentMerkleTree; - - fn deref(&self) -> &Self::Target { - &self.merkle_tree - } -} - -impl DerefMut - for IndexedMerkleTree -where - H: Hasher, - I: CheckedAdd - + CheckedSub - + Copy - + Clone - + fmt::Debug - + PartialOrd - + ToBytes - + TryFrom - + Unsigned, - usize: From, -{ - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.merkle_tree - } -} - -impl PartialEq - for IndexedMerkleTree -where - H: Hasher, - I: CheckedAdd - + CheckedSub - + Copy - + Clone - + fmt::Debug - + PartialOrd - + ToBytes - + TryFrom - + Unsigned, - usize: From, -{ - fn eq(&self, other: &Self) -> bool { - self.merkle_tree.eq(&other.merkle_tree) - && self - .indexed_changelog - .capacity() - .eq(&other.indexed_changelog.capacity()) - && self - .indexed_changelog - .len() - .eq(&other.indexed_changelog.len()) - && self - .indexed_changelog - .first_index() - .eq(&other.indexed_changelog.first_index()) - && self - .indexed_changelog - .last_index() - .eq(&other.indexed_changelog.last_index()) - && self.indexed_changelog.eq(&other.indexed_changelog) - } -} diff --git a/indexed/src/reference.rs b/indexed/src/reference.rs deleted file mode 100644 index 7861d02..0000000 --- a/indexed/src/reference.rs +++ /dev/null @@ -1,211 +0,0 @@ -use std::marker::PhantomData; - -use light_bounded_vec::{BoundedVec, BoundedVecError}; -use light_concurrent_merkle_tree::light_hasher::{errors::HasherError, Hasher}; -use light_merkle_tree_reference::{MerkleTree, ReferenceMerkleTreeError}; -use light_utils::bigint::bigint_to_be_bytes_array; -use num_bigint::BigUint; -use num_traits::{CheckedAdd, CheckedSub, Num, ToBytes, Unsigned}; -use thiserror::Error; - -use crate::{ - array::{IndexedArray, IndexedElement}, - errors::IndexedMerkleTreeError, - HIGHEST_ADDRESS_PLUS_ONE, -}; - -#[derive(Debug, Error)] -pub enum IndexedReferenceMerkleTreeError { - #[error("NonInclusionProofFailedLowerBoundViolated")] - NonInclusionProofFailedLowerBoundViolated, - #[error("NonInclusionProofFailedHigherBoundViolated")] - NonInclusionProofFailedHigherBoundViolated, - #[error(transparent)] - Indexed(#[from] IndexedMerkleTreeError), - #[error(transparent)] - Reference(#[from] ReferenceMerkleTreeError), - #[error(transparent)] - Hasher(#[from] HasherError), -} - -#[derive(Debug, Clone)] -#[repr(C)] -pub struct IndexedMerkleTree -where - H: Hasher, - I: CheckedAdd + CheckedSub + Copy + Clone + PartialOrd + ToBytes + TryFrom + Unsigned, -{ - pub merkle_tree: MerkleTree, - _index: PhantomData, -} - -impl IndexedMerkleTree -where - H: Hasher, - I: CheckedAdd + CheckedSub + Copy + Clone + PartialOrd + ToBytes + TryFrom + Unsigned, - usize: From, -{ - pub fn new( - height: usize, - canopy_depth: usize, - ) -> Result { - let mut merkle_tree = MerkleTree::new(height, canopy_depth); - - // Append the first low leaf, which has value 0 and does not point - // to any other leaf yet. 
- // This low leaf is going to be updated during the first `update` - // operation. - merkle_tree.append(&H::zero_indexed_leaf())?; - - Ok(Self { - merkle_tree, - _index: PhantomData, - }) - } - - /// Initializes the reference indexed merkle tree on par with the - /// on-chain indexed concurrent merkle tree. - /// Inserts the ranges 0 - BN254 Field Size - 1 into the tree. - pub fn init(&mut self) -> Result<(), IndexedReferenceMerkleTreeError> { - let mut indexed_array = IndexedArray::::default(); - let init_value = BigUint::from_str_radix(HIGHEST_ADDRESS_PLUS_ONE, 10).unwrap(); - let nullifier_bundle = indexed_array.append(&init_value)?; - let new_low_leaf = nullifier_bundle - .new_low_element - .hash::(&nullifier_bundle.new_element.value)?; - - self.merkle_tree.update(&new_low_leaf, 0)?; - let new_leaf = nullifier_bundle - .new_element - .hash::(&nullifier_bundle.new_element_next_value)?; - self.merkle_tree.append(&new_leaf)?; - Ok(()) - } - - pub fn get_path_of_leaf( - &self, - index: usize, - full: bool, - ) -> Result, BoundedVecError> { - self.merkle_tree.get_path_of_leaf(index, full) - } - - pub fn get_proof_of_leaf( - &self, - index: usize, - full: bool, - ) -> Result, BoundedVecError> { - self.merkle_tree.get_proof_of_leaf(index, full) - } - - pub fn root(&self) -> [u8; 32] { - self.merkle_tree.root() - } - - // TODO: rename input values - pub fn update( - &mut self, - new_low_element: &IndexedElement, - new_element: &IndexedElement, - new_element_next_value: &BigUint, - ) -> Result<(), IndexedReferenceMerkleTreeError> { - // Update the low element. - let new_low_leaf = new_low_element.hash::(&new_element.value)?; - println!("reference update new low leaf hash {:?}", new_low_leaf); - self.merkle_tree - .update(&new_low_leaf, usize::from(new_low_element.index))?; - println!("reference updated root {:?}", self.merkle_tree.root()); - // Append the new element. 
- let new_leaf = new_element.hash::(new_element_next_value)?; - println!("reference update new leaf hash {:?}", new_leaf); - self.merkle_tree.append(&new_leaf)?; - println!("reference appended root {:?}", self.merkle_tree.root()); - - Ok(()) - } - - // TODO: add append with new value, so that we don't need to compute the lowlevel values manually - pub fn append( - &mut self, - value: &BigUint, - indexed_array: &mut IndexedArray, - ) -> Result<(), IndexedReferenceMerkleTreeError> { - println!("appending {:?}", value); - let nullifier_bundle = indexed_array.append(value).unwrap(); - self.update( - &nullifier_bundle.new_low_element, - &nullifier_bundle.new_element, - &nullifier_bundle.new_element_next_value, - )?; - - Ok(()) - } - - pub fn get_non_inclusion_proof( - &self, - value: &BigUint, - indexed_array: &IndexedArray, - ) -> Result { - let (low_element, _next_value) = indexed_array.find_low_element_for_nonexistent(value)?; - let merkle_proof = self - .get_proof_of_leaf(usize::from(low_element.index), true) - .unwrap(); - let higher_range_value = indexed_array - .get(low_element.next_index()) - .unwrap() - .value - .clone(); - Ok(NonInclusionProof { - root: self.root(), - value: bigint_to_be_bytes_array::<32>(value).unwrap(), - leaf_lower_range_value: bigint_to_be_bytes_array::<32>(&low_element.value).unwrap(), - leaf_higher_range_value: bigint_to_be_bytes_array::<32>(&higher_range_value).unwrap(), - leaf_index: low_element.index.into(), - next_index: low_element.next_index(), - merkle_proof, - }) - } - - pub fn verify_non_inclusion_proof( - &self, - proof: &NonInclusionProof, - ) -> Result<(), IndexedReferenceMerkleTreeError> { - let value_big_int = BigUint::from_bytes_be(&proof.value); - let lower_end_value = BigUint::from_bytes_be(&proof.leaf_lower_range_value); - if lower_end_value >= value_big_int { - return Err(IndexedReferenceMerkleTreeError::NonInclusionProofFailedLowerBoundViolated); - } - let higher_end_value = BigUint::from_bytes_be(&proof.leaf_higher_range_value); - if higher_end_value <= value_big_int { - return Err( - IndexedReferenceMerkleTreeError::NonInclusionProofFailedHigherBoundViolated, - ); - } - - let array_element = IndexedElement:: { - value: lower_end_value, - index: proof.leaf_index, - next_index: proof.next_index, - }; - let leaf_hash = array_element.hash::(&higher_end_value)?; - self.merkle_tree - .verify(&leaf_hash, &proof.merkle_proof, proof.leaf_index) - .unwrap(); - Ok(()) - } -} - -// TODO: check why next_index is usize while index is I -/// We prove non-inclusion by: -/// 1. Showing that value is greater than leaf_lower_range_value and less than leaf_higher_range_value -/// 2. 
Showing that the leaf_hash H(leaf_lower_range_value, leaf_next_index, leaf_higher_value) is included in the root (Merkle tree) -#[derive(Debug)] -pub struct NonInclusionProof { - pub root: [u8; 32], - pub value: [u8; 32], - pub leaf_lower_range_value: [u8; 32], - pub leaf_higher_range_value: [u8; 32], - pub leaf_index: usize, - pub next_index: usize, - pub merkle_proof: BoundedVec<[u8; 32]>, -} diff --git a/indexed/src/zero_copy.rs b/indexed/src/zero_copy.rs deleted file mode 100644 index 1bb8b37..0000000 --- a/indexed/src/zero_copy.rs +++ /dev/null @@ -1,343 +0,0 @@ -use std::{ - fmt, - marker::PhantomData, - mem, - ops::{Deref, DerefMut}, -}; - -use light_bounded_vec::{CyclicBoundedVec, CyclicBoundedVecMetadata}; -use light_concurrent_merkle_tree::{ - errors::ConcurrentMerkleTreeError, - zero_copy::{ConcurrentMerkleTreeZeroCopy, ConcurrentMerkleTreeZeroCopyMut}, - ConcurrentMerkleTree, -}; -use light_hasher::Hasher; -use light_utils::offset::zero_copy::{read_array_like_ptr_at, read_ptr_at, write_at}; -use num_traits::{CheckedAdd, CheckedSub, ToBytes, Unsigned}; - -use crate::{errors::IndexedMerkleTreeError, IndexedMerkleTree}; - -#[derive(Debug)] -pub struct IndexedMerkleTreeZeroCopy<'a, H, I, const HEIGHT: usize, const NET_HEIGHT: usize> -where - H: Hasher, - I: CheckedAdd - + CheckedSub - + Copy - + Clone - + fmt::Debug - + PartialOrd - + ToBytes - + TryFrom - + Unsigned, - usize: From, -{ - pub merkle_tree: mem::ManuallyDrop>, - // The purpose of this field is ensuring that the wrapper does not outlive - // the buffer. - _bytes: &'a [u8], -} - -impl<'a, H, I, const HEIGHT: usize, const NET_HEIGHT: usize> - IndexedMerkleTreeZeroCopy<'a, H, I, HEIGHT, NET_HEIGHT> -where - H: Hasher, - I: CheckedAdd - + CheckedSub - + Copy - + Clone - + fmt::Debug - + PartialOrd - + ToBytes - + TryFrom - + Unsigned, - usize: From, -{ - /// Returns a zero-copy wrapper of `IndexedMerkleTree` created from the - /// data in the provided `bytes` buffer. 
- pub fn from_bytes_zero_copy(bytes: &'a [u8]) -> Result { - let (merkle_tree, mut offset) = - ConcurrentMerkleTreeZeroCopy::struct_from_bytes_zero_copy(bytes)?; - - let indexed_changelog_metadata: *mut CyclicBoundedVecMetadata = - unsafe { read_ptr_at(bytes, &mut offset) }; - - let expected_size = IndexedMerkleTree::::size_in_account( - merkle_tree.height, - merkle_tree.changelog.capacity(), - merkle_tree.roots.capacity(), - merkle_tree.canopy_depth, - unsafe { (*indexed_changelog_metadata).capacity() }, - ); - if bytes.len() < expected_size { - return Err(IndexedMerkleTreeError::ConcurrentMerkleTree( - ConcurrentMerkleTreeError::BufferSize(expected_size, bytes.len()), - )); - } - - let indexed_changelog = unsafe { - CyclicBoundedVec::from_raw_parts( - indexed_changelog_metadata, - read_array_like_ptr_at( - bytes, - &mut offset, - (*indexed_changelog_metadata).capacity(), - ), - ) - }; - - Ok(Self { - merkle_tree: mem::ManuallyDrop::new(IndexedMerkleTree { - merkle_tree, - indexed_changelog, - _index: PhantomData, - }), - _bytes: bytes, - }) - } -} - -impl Deref - for IndexedMerkleTreeZeroCopy<'_, H, I, HEIGHT, NET_HEIGHT> -where - H: Hasher, - I: CheckedAdd - + CheckedSub - + Copy - + Clone - + fmt::Debug - + PartialOrd - + ToBytes - + TryFrom - + Unsigned, - usize: From, -{ - type Target = IndexedMerkleTree; - - fn deref(&self) -> &Self::Target { - &self.merkle_tree - } -} - -#[derive(Debug)] -pub struct IndexedMerkleTreeZeroCopyMut<'a, H, I, const HEIGHT: usize, const NET_HEIGHT: usize>( - IndexedMerkleTreeZeroCopy<'a, H, I, HEIGHT, NET_HEIGHT>, -) -where - H: Hasher, - I: CheckedAdd - + CheckedSub - + Copy - + Clone - + fmt::Debug - + PartialOrd - + ToBytes - + TryFrom - + Unsigned, - usize: From; - -impl<'a, H, I, const HEIGHT: usize, const NET_HEIGHT: usize> - IndexedMerkleTreeZeroCopyMut<'a, H, I, HEIGHT, NET_HEIGHT> -where - H: Hasher, - I: CheckedAdd - + CheckedSub - + Copy - + Clone - + fmt::Debug - + PartialOrd - + ToBytes - + TryFrom - + Unsigned, - usize: From, -{ - pub fn from_bytes_zero_copy_mut(bytes: &'a mut [u8]) -> Result { - Ok(Self(IndexedMerkleTreeZeroCopy::from_bytes_zero_copy( - bytes, - )?)) - } - - pub fn from_bytes_zero_copy_init( - bytes: &'a mut [u8], - height: usize, - canopy_depth: usize, - changelog_capacity: usize, - roots_capacity: usize, - indexed_changelog_capacity: usize, - ) -> Result { - let _ = ConcurrentMerkleTreeZeroCopyMut::::fill_non_dyn_fields_in_buffer( - bytes, - height, - canopy_depth, - changelog_capacity, - roots_capacity, - )?; - - let expected_size = IndexedMerkleTree::::size_in_account( - height, - changelog_capacity, - roots_capacity, - canopy_depth, - indexed_changelog_capacity, - ); - if bytes.len() < expected_size { - return Err(IndexedMerkleTreeError::ConcurrentMerkleTree( - ConcurrentMerkleTreeError::BufferSize(expected_size, bytes.len()), - )); - } - - let mut offset = ConcurrentMerkleTree::::size_in_account( - height, - changelog_capacity, - roots_capacity, - canopy_depth, - ); - - let indexed_changelog_metadata = CyclicBoundedVecMetadata::new(indexed_changelog_capacity); - write_at::( - bytes, - &indexed_changelog_metadata.to_le_bytes(), - &mut offset, - ); - - Self::from_bytes_zero_copy_mut(bytes) - } -} - -impl Deref - for IndexedMerkleTreeZeroCopyMut<'_, H, I, HEIGHT, NET_HEIGHT> -where - H: Hasher, - I: CheckedAdd - + CheckedSub - + Copy - + Clone - + fmt::Debug - + PartialOrd - + ToBytes - + TryFrom - + Unsigned, - usize: From, -{ - type Target = IndexedMerkleTree; - - fn deref(&self) -> &Self::Target { - 
&self.0.merkle_tree - } -} - -impl DerefMut - for IndexedMerkleTreeZeroCopyMut<'_, H, I, HEIGHT, NET_HEIGHT> -where - H: Hasher, - I: CheckedAdd - + CheckedSub - + Copy - + Clone - + fmt::Debug - + PartialOrd - + ToBytes - + TryFrom - + Unsigned, - usize: From, -{ - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0.merkle_tree - } -} - -#[cfg(test)] -mod test { - use light_hasher::Poseidon; - use light_utils::bigint::bigint_to_be_bytes_array; - use num_bigint::RandBigInt; - use rand::thread_rng; - - use super::*; - - fn from_bytes_zero_copy< - const HEIGHT: usize, - const NET_HEIGHT: usize, - const CHANGELOG_SIZE: usize, - const ROOTS: usize, - const CANOPY_DEPTH: usize, - const INDEXED_CHANGELOG_SIZE: usize, - const OPERATIONS: usize, - >() { - let mut mt_1 = IndexedMerkleTree::::new( - HEIGHT, - CHANGELOG_SIZE, - ROOTS, - CANOPY_DEPTH, - INDEXED_CHANGELOG_SIZE, - ) - .unwrap(); - mt_1.init().unwrap(); - - let mut bytes = vec![ - 0u8; - IndexedMerkleTree::::size_in_account( - HEIGHT, - CHANGELOG_SIZE, - ROOTS, - CANOPY_DEPTH, - INDEXED_CHANGELOG_SIZE - ) - ]; - - { - let mut mt_2 = - IndexedMerkleTreeZeroCopyMut::::from_bytes_zero_copy_init( - &mut bytes, - HEIGHT, - CANOPY_DEPTH, - CHANGELOG_SIZE, - ROOTS, - INDEXED_CHANGELOG_SIZE, - ) - .unwrap(); - mt_2.init().unwrap(); - - assert_eq!(mt_1, *mt_2); - } - - let mut rng = thread_rng(); - - for _ in 0..OPERATIONS { - // Reload the tree from bytes on each iteration. - let mut mt_2 = - IndexedMerkleTreeZeroCopyMut::::from_bytes_zero_copy_mut( - &mut bytes, - ) - .unwrap(); - - let leaf: [u8; 32] = bigint_to_be_bytes_array::<32>(&rng.gen_biguint(248)).unwrap(); - mt_1.append(&leaf).unwrap(); - mt_2.append(&leaf).unwrap(); - - assert_eq!(mt_1, *mt_2); - } - } - - #[test] - fn test_from_bytes_zero_copy_26_1400_2400_10_256_1024() { - const HEIGHT: usize = 26; - const NET_HEIGHT: usize = 16; - const CHANGELOG_SIZE: usize = 1400; - const ROOTS: usize = 2400; - const CANOPY_DEPTH: usize = 10; - const INDEXED_CHANGELOG_SIZE: usize = 256; - - const OPERATIONS: usize = 1024; - - from_bytes_zero_copy::< - HEIGHT, - NET_HEIGHT, - CHANGELOG_SIZE, - ROOTS, - CANOPY_DEPTH, - INDEXED_CHANGELOG_SIZE, - OPERATIONS, - >() - } -} diff --git a/indexed/tests/tests.rs b/indexed/tests/tests.rs deleted file mode 100644 index e0f919e..0000000 --- a/indexed/tests/tests.rs +++ /dev/null @@ -1,960 +0,0 @@ -use std::cell::{Ref, RefCell, RefMut}; - -use light_bounded_vec::BoundedVec; -use light_concurrent_merkle_tree::{ - errors::ConcurrentMerkleTreeError, - event::IndexedMerkleTreeUpdate, - light_hasher::{Hasher, Poseidon}, -}; -use light_hash_set::{HashSet, HashSetError}; -use light_indexed_merkle_tree::{ - array::{IndexedArray, IndexedElement}, - errors::IndexedMerkleTreeError, - reference, IndexedMerkleTree, HIGHEST_ADDRESS_PLUS_ONE, -}; -use light_utils::bigint::bigint_to_be_bytes_array; -use num_bigint::{BigUint, RandBigInt, ToBigUint}; -use num_traits::{FromBytes, Num}; -use rand::thread_rng; -use thiserror::Error; - -const MERKLE_TREE_HEIGHT: usize = 4; -const MERKLE_TREE_CHANGELOG: usize = 256; -const MERKLE_TREE_ROOTS: usize = 1024; -const MERKLE_TREE_CANOPY: usize = 0; -const MERKLE_TREE_INDEXED_CHANGELOG: usize = 64; -const NET_HEIGHT: usize = MERKLE_TREE_HEIGHT - MERKLE_TREE_CANOPY; - -const QUEUE_ELEMENTS: usize = 1024; -const SAFETY_MARGIN: usize = 10; - -const NR_NULLIFIERS: usize = 2; - -/// A mock function which imitates a Merkle tree program instruction for -/// inserting nullifiers into the queue. 
-fn program_insert( - // PDA - mut queue: RefMut<'_, HashSet>, - merkle_tree: Ref<'_, IndexedMerkleTree>, - // Instruction data - nullifiers: [[u8; 32]; NR_NULLIFIERS], -) -> Result<(), HashSetError> -where - H: Hasher, -{ - for nullifier in nullifiers.iter() { - let nf = BigUint::from_be_bytes(nullifier.as_slice()); - queue.insert(&nf, merkle_tree.sequence_number())?; - } - Ok(()) -} - -#[derive(Error, Debug)] -enum RelayerUpdateError { - #[error("Updating Merkle tree failed, {0:?}")] - MerkleTreeUpdate(Vec), -} - -/// A mock function which imitates a Merkle tree program instruction for -/// inserting nullifiers from the queue to the tree. -#[allow(clippy::too_many_arguments)] -fn program_update( - // PDAs - queue: &mut RefMut<'_, HashSet>, - merkle_tree: &mut RefMut<'_, IndexedMerkleTree>, - // Instruction data - changelog_index: u16, - indexed_changelog_index: u16, - queue_index: u16, - low_nullifier: IndexedElement, - low_nullifier_next_value: &BigUint, - low_nullifier_proof: &mut BoundedVec<[u8; 32]>, -) -> Result, IndexedMerkleTreeError> -where - H: Hasher, -{ - // Get the nullifier from the queue. - let nullifier = queue - .get_unmarked_bucket(queue_index as usize) - .unwrap() - .unwrap(); - - // Update the Merkle tree. - let update = merkle_tree.update( - usize::from(changelog_index), - usize::from(indexed_changelog_index), - nullifier.value_biguint(), - low_nullifier.clone(), - low_nullifier_next_value.clone(), - low_nullifier_proof, - )?; - - // Mark the nullifier. - queue - .mark_with_sequence_number(queue_index as usize, merkle_tree.sequence_number()) - .unwrap(); - - Ok(update) -} - -// TODO: unify these helpers with MockBatchedForester -/// A mock function which imitates a relayer endpoint for updating the -/// nullifier Merkle tree. -fn relayer_update( - // PDAs - queue: &mut RefMut<'_, HashSet>, - merkle_tree: &mut RefMut<'_, IndexedMerkleTree>, -) -> Result<(), RelayerUpdateError> -where - H: Hasher, -{ - let mut relayer_indexing_array = IndexedArray::::default(); - let mut relayer_merkle_tree = - reference::IndexedMerkleTree::::new(MERKLE_TREE_HEIGHT, MERKLE_TREE_CANOPY) - .unwrap(); - - let mut update_errors: Vec = Vec::new(); - - let queue_indices = queue.iter().map(|(index, _)| index).collect::>(); - for queue_index in queue_indices { - let changelog_index = merkle_tree.changelog_index(); - let indexed_changelog_index = merkle_tree.indexed_changelog_index(); - - let queue_element = queue.get_unmarked_bucket(queue_index).unwrap().unwrap(); - - // Create new element from the dequeued value. - let (old_low_nullifier, old_low_nullifier_next_value) = relayer_indexing_array - .find_low_element_for_nonexistent(&queue_element.value_biguint()) - .unwrap(); - let nullifier_bundle = relayer_indexing_array - .new_element_with_low_element_index( - old_low_nullifier.index, - &queue_element.value_biguint(), - ) - .unwrap(); - let mut low_nullifier_proof = relayer_merkle_tree - .get_proof_of_leaf(old_low_nullifier.index, false) - .unwrap(); - - // Update on-chain tree. 
- let update_successful = match program_update( - queue, - merkle_tree, - changelog_index as u16, - indexed_changelog_index as u16, - queue_index as u16, - old_low_nullifier, - &old_low_nullifier_next_value, - &mut low_nullifier_proof, - ) { - Ok(event) => { - assert_eq!( - event.new_low_element.index, - nullifier_bundle.new_low_element.index - ); - assert_eq!( - event.new_low_element.next_index, - nullifier_bundle.new_low_element.next_index - ); - assert_eq!( - event.new_low_element.value, - bigint_to_be_bytes_array::<32>(&nullifier_bundle.new_low_element.value) - .unwrap() - ); - assert_eq!( - event.new_low_element.next_value, - bigint_to_be_bytes_array::<32>(&nullifier_bundle.new_element.value).unwrap() - ); - let leaf_hash = nullifier_bundle - .new_low_element - .hash::(&nullifier_bundle.new_element.value) - .unwrap(); - assert_eq!(event.new_low_element_hash, leaf_hash); - let leaf_hash = nullifier_bundle - .new_element - .hash::(&nullifier_bundle.new_element_next_value) - .unwrap(); - assert_eq!(event.new_high_element_hash, leaf_hash); - assert_eq!( - event.new_high_element.index, - nullifier_bundle.new_element.index - ); - assert_eq!( - event.new_high_element.next_index, - nullifier_bundle.new_element.next_index - ); - assert_eq!( - event.new_high_element.value, - bigint_to_be_bytes_array::<32>(&nullifier_bundle.new_element.value).unwrap() - ); - assert_eq!( - event.new_high_element.next_value, - bigint_to_be_bytes_array::<32>(&nullifier_bundle.new_element_next_value) - .unwrap() - ); - true - } - Err(e) => { - update_errors.push(e); - false - } - }; - - // Check if the on-chain Merkle tree was really updated. - if update_successful { - // Update off-chain tree. - relayer_merkle_tree - .update( - &nullifier_bundle.new_low_element, - &nullifier_bundle.new_element, - &nullifier_bundle.new_element_next_value, - ) - .unwrap(); - - let low_nullifier_leaf = nullifier_bundle - .new_low_element - .hash::(&nullifier_bundle.new_element.value) - .unwrap(); - let low_nullifier_proof = relayer_merkle_tree - .get_proof_of_leaf(nullifier_bundle.new_low_element.index(), false) - .unwrap(); - merkle_tree - .validate_proof( - &low_nullifier_leaf, - nullifier_bundle.new_low_element.index(), - &low_nullifier_proof, - ) - .unwrap(); - - let new_nullifier_leaf = nullifier_bundle - .new_element - .hash::(&nullifier_bundle.new_element_next_value) - .unwrap(); - let new_nullifier_proof = relayer_merkle_tree - .get_proof_of_leaf(nullifier_bundle.new_element.index(), false) - .unwrap(); - merkle_tree - .validate_proof( - &new_nullifier_leaf, - nullifier_bundle.new_element.index(), - &new_nullifier_proof, - ) - .unwrap(); - - // Insert the element to the indexing array. - relayer_indexing_array - .append_with_low_element_index( - nullifier_bundle.new_low_element.index, - &nullifier_bundle.new_element.value, - ) - .unwrap(); - } - } - - if update_errors.is_empty() { - Ok(()) - } else { - Err(RelayerUpdateError::MerkleTreeUpdate(update_errors)) - } -} - -/// Tests the valid case of: -/// -/// * Inserting nullifiers to the queue. -/// * Calling the relayer to update the on-chain nullifier Merkle tree. -fn insert_and_update() -where - H: Hasher, -{ - // On-chain PDAs. 
- let onchain_queue: RefCell =
- RefCell::new(HashSet::new(QUEUE_ELEMENTS, MERKLE_TREE_ROOTS + SAFETY_MARGIN).unwrap());
- let onchain_tree: RefCell> =
- RefCell::new(
- IndexedMerkleTree::new(
- MERKLE_TREE_HEIGHT,
- MERKLE_TREE_CHANGELOG,
- MERKLE_TREE_ROOTS,
- MERKLE_TREE_CANOPY,
- MERKLE_TREE_INDEXED_CHANGELOG,
- )
- .unwrap(),
- );
- onchain_tree.borrow_mut().init().unwrap();
-
- // Insert a pair of nullifiers.
- let nullifier1 = 30_u32.to_biguint().unwrap();
- let nullifier2 = 10_u32.to_biguint().unwrap();
- program_insert::(
- onchain_queue.borrow_mut(),
- onchain_tree.borrow(),
- [
- bigint_to_be_bytes_array(&nullifier1).unwrap(),
- bigint_to_be_bytes_array(&nullifier2).unwrap(),
- ],
- )
- .unwrap();
-
- // Insert another pair of nullifiers.
- let nullifier3 = 20_u32.to_biguint().unwrap();
- let nullifier4 = 50_u32.to_biguint().unwrap();
- program_insert::(
- onchain_queue.borrow_mut(),
- onchain_tree.borrow(),
- [
- bigint_to_be_bytes_array(&nullifier3).unwrap(),
- bigint_to_be_bytes_array(&nullifier4).unwrap(),
- ],
- )
- .unwrap();
-
- // Call the relayer to update the tree.
- relayer_update::(
- &mut onchain_queue.borrow_mut(),
- &mut onchain_tree.borrow_mut(),
- )
- .unwrap();
-}
-
-#[test]
-pub fn test_insert_and_update_poseidon() {
- insert_and_update::()
-}
-
-/// Tests the invalid case of inserting the same nullifiers multiple times into
-/// the queue and Merkle tree - an attempt at double spending.
-fn double_spend()
-where
- H: Hasher,
-{
- // On-chain PDAs.
- let onchain_queue: RefCell = RefCell::new(HashSet::new(20, 0).unwrap());
- let onchain_tree: RefCell> =
- RefCell::new(
- IndexedMerkleTree::new(
- MERKLE_TREE_HEIGHT,
- MERKLE_TREE_CHANGELOG,
- MERKLE_TREE_ROOTS,
- MERKLE_TREE_CANOPY,
- MERKLE_TREE_INDEXED_CHANGELOG,
- )
- .unwrap(),
- );
- onchain_tree.borrow_mut().init().unwrap();
-
- // Insert a pair of nullifiers.
- let nullifier1 = 30_u32.to_biguint().unwrap();
- let nullifier1: [u8; 32] = bigint_to_be_bytes_array(&nullifier1).unwrap();
- let nullifier2 = 10_u32.to_biguint().unwrap();
- let nullifier2: [u8; 32] = bigint_to_be_bytes_array(&nullifier2).unwrap();
- program_insert::(
- onchain_queue.borrow_mut(),
- onchain_tree.borrow(),
- [nullifier1, nullifier2],
- )
- .unwrap();
-
- // Try inserting the same pair into the queue. It should fail with an error.
- let res = program_insert::(
- onchain_queue.borrow_mut(),
- onchain_tree.borrow(),
- [nullifier1, nullifier2],
- );
- assert!(matches!(res, Err(HashSetError::ElementAlreadyExists)));
-
- // Update the on-chain tree (so it contains the nullifiers we inserted).
- relayer_update::(
- &mut onchain_queue.borrow_mut(),
- &mut onchain_tree.borrow_mut(),
- )
- .unwrap();
-
- // The nullifiers are in the tree and not in the queue anymore. We can try
- // our luck with double-spending again.
- program_insert::(
- onchain_queue.borrow_mut(),
- onchain_tree.borrow(),
- [nullifier1, nullifier2],
- )
- .unwrap();
- // At the same time, also insert some new nullifiers which aren't spent
- // yet. We want to make sure that they will be processed successfully and
- // only the invalid nullifiers will produce errors.
- let nullifier3 = 25_u32.to_biguint().unwrap();
- let nullifier4 = 5_u32.to_biguint().unwrap();
- program_insert::(
- onchain_queue.borrow_mut(),
- onchain_tree.borrow(),
- [
- bigint_to_be_bytes_array(&nullifier3).unwrap(),
- bigint_to_be_bytes_array(&nullifier4).unwrap(),
- ],
- )
- .unwrap();
- // We expect exactly two errors (for the invalid nullifiers). No more, no
- // less.
- let res = relayer_update::(
- &mut onchain_queue.borrow_mut(),
- &mut onchain_tree.borrow_mut(),
- );
- assert!(matches!(res, Err(RelayerUpdateError::MerkleTreeUpdate(_))));
-}
-
-#[test]
-pub fn test_double_spend_queue_poseidon() {
- double_spend::()
-}
-
-/// Try to insert a nullifier into the tree while pointing to an invalid low
-/// nullifier.
-///
-/// Such an invalid insertion needs to be performed manually, without the
-/// relayer's help (which would always insert that nullifier correctly).
-fn insert_invalid_low_element()
-where
- H: Hasher,
-{
- // On-chain PDAs.
- let onchain_queue: RefCell =
- RefCell::new(HashSet::new(QUEUE_ELEMENTS, MERKLE_TREE_ROOTS + SAFETY_MARGIN).unwrap());
- let onchain_tree: RefCell> =
- RefCell::new(
- IndexedMerkleTree::new(
- MERKLE_TREE_HEIGHT,
- MERKLE_TREE_CHANGELOG,
- MERKLE_TREE_ROOTS,
- MERKLE_TREE_CANOPY,
- MERKLE_TREE_INDEXED_CHANGELOG,
- )
- .unwrap(),
- );
- onchain_tree.borrow_mut().init().unwrap();
-
- // Local artifacts.
- let mut local_indexed_array = IndexedArray::::default();
- let mut local_merkle_tree =
- reference::IndexedMerkleTree::::new(MERKLE_TREE_HEIGHT, MERKLE_TREE_CANOPY)
- .unwrap();
-
- // Insert a pair of nullifiers, correctly. Just do it with the relayer.
- let nullifier1 = 30_u32.to_biguint().unwrap();
- let nullifier2 = 10_u32.to_biguint().unwrap();
- onchain_queue
- .borrow_mut()
- .insert(&nullifier1, onchain_tree.borrow().sequence_number())
- .unwrap();
- onchain_queue
- .borrow_mut()
- .insert(&nullifier2, onchain_tree.borrow().sequence_number())
- .unwrap();
- let nullifier_bundle = local_indexed_array.append(&nullifier1).unwrap();
- local_merkle_tree
- .update(
- &nullifier_bundle.new_low_element,
- &nullifier_bundle.new_element,
- &nullifier_bundle.new_element_next_value,
- )
- .unwrap();
- let nullifier_bundle = local_indexed_array.append(&nullifier2).unwrap();
- local_merkle_tree
- .update(
- &nullifier_bundle.new_low_element,
- &nullifier_bundle.new_element,
- &nullifier_bundle.new_element_next_value,
- )
- .unwrap();
- relayer_update(
- &mut onchain_queue.borrow_mut(),
- &mut onchain_tree.borrow_mut(),
- )
- .unwrap();
-
- // Try inserting nullifier 20, while pointing to index 1 (value 30) as the
- // low nullifier. Point to index 2 (value 10) as the next value.
- // Therefore, the new element is lower than the supposed low element.
- let nullifier3 = 20_u32.to_biguint().unwrap();
- onchain_queue
- .borrow_mut()
- .insert(&nullifier3, onchain_tree.borrow().sequence_number())
- .unwrap();
- let changelog_index = onchain_tree.borrow().changelog_index();
- let indexed_changelog_index = onchain_tree.borrow().indexed_changelog_index();
- // Index of our new nullifier in the queue.
- let queue_index = onchain_queue
- .borrow()
- .find_element_index(&nullifier3, None)
- .unwrap()
- .unwrap();
- // (Invalid) low nullifier.
- let low_nullifier = local_indexed_array.get(1).cloned().unwrap();
- let low_nullifier_next_value = local_indexed_array
- .get(low_nullifier.next_index)
- .cloned()
- .unwrap()
- .value;
- let mut low_nullifier_proof = local_merkle_tree.get_proof_of_leaf(1, false).unwrap();
- assert!(matches!(
- program_update(
- &mut onchain_queue.borrow_mut(),
- &mut onchain_tree.borrow_mut(),
- changelog_index as u16,
- indexed_changelog_index as u16,
- queue_index as u16,
- low_nullifier,
- &low_nullifier_next_value,
- &mut low_nullifier_proof,
- ),
- Err(IndexedMerkleTreeError::LowElementGreaterOrEqualToNewElement)
- ));
-
- // Try inserting nullifier 50, while pointing to index 0 as the low nullifier.
- // Therefore, the new element is greater than the next element.
- let nullifier3 = 50_u32.to_biguint().unwrap();
- onchain_queue
- .borrow_mut()
- .insert(&nullifier3, onchain_tree.borrow().sequence_number())
- .unwrap();
- let changelog_index = onchain_tree.borrow().changelog_index();
- let indexed_changelog_index = onchain_tree.borrow().indexed_changelog_index();
- // Index of our new nullifier in the queue.
- let queue_index = onchain_queue
- .borrow()
- .find_element_index(&nullifier3, None)
- .unwrap()
- .unwrap();
- // (Invalid) low nullifier.
- let low_nullifier = local_indexed_array.get(0).cloned().unwrap();
- let low_nullifier_next_value = local_indexed_array
- .get(low_nullifier.next_index)
- .cloned()
- .unwrap()
- .value;
- let mut low_nullifier_proof = local_merkle_tree.get_proof_of_leaf(0, false).unwrap();
- assert!(matches!(
- program_update(
- &mut onchain_queue.borrow_mut(),
- &mut onchain_tree.borrow_mut(),
- changelog_index as u16,
- indexed_changelog_index as u16,
- queue_index as u16,
- low_nullifier,
- &low_nullifier_next_value,
- &mut low_nullifier_proof,
- ),
- Err(IndexedMerkleTreeError::NewElementGreaterOrEqualToNextElement)
- ));
- let nullifier4 = 45_u32.to_biguint().unwrap();
- onchain_queue
- .borrow_mut()
- .insert(&nullifier4, onchain_tree.borrow().sequence_number())
- .unwrap();
- let changelog_index = onchain_tree.borrow().changelog_index();
- let indexed_changelog_index = onchain_tree.borrow().indexed_changelog_index();
- let (low_nullifier, low_nullifier_next_value) = local_indexed_array
- .find_low_element_for_nonexistent(&nullifier4)
- .unwrap();
- let mut low_nullifier_proof = local_merkle_tree
- .get_proof_of_leaf(low_nullifier.index(), false)
- .unwrap();
- let result = program_update(
- &mut onchain_queue.borrow_mut(),
- &mut onchain_tree.borrow_mut(),
- changelog_index as u16,
- indexed_changelog_index as u16,
- queue_index as u16,
- low_nullifier,
- &low_nullifier_next_value,
- &mut low_nullifier_proof,
- );
- println!("result {:?}", result);
- assert!(matches!(
- result,
- Err(IndexedMerkleTreeError::ConcurrentMerkleTree(
- ConcurrentMerkleTreeError::InvalidProof(_, _)
- ))
- ));
-}
-
-#[test]
-pub fn test_insert_invalid_low_element_poseidon() {
- insert_invalid_low_element::()
-}
-
-#[test]
-pub fn hash_reference_indexed_element() {
- let element = IndexedElement:: {
- value: 0.to_biguint().unwrap(),
- index: 0,
- next_index: 1,
- };
-
- let next_value = BigUint::from_str_radix(HIGHEST_ADDRESS_PLUS_ONE, 10).unwrap();
- let hash = element.hash::(&next_value).unwrap();
- assert_eq!(
- hash,
- [
- 40, 8, 192, 134, 75, 198, 77, 187, 129, 249, 133, 121, 54, 189, 242, 28, 117, 71, 255,
- 32, 155, 52, 136, 196, 99, 146, 204, 174, 160, 238, 0, 110
- ]
- );
-}
-
-#[test]
-pub fn functional_non_inclusion_test() {
- let mut relayer_indexing_array = IndexedArray::::default();
-
- // appends the first element
- let mut relayer_merkle_tree = reference::IndexedMerkleTree::::new(
- MERKLE_TREE_HEIGHT,
- MERKLE_TREE_CANOPY,
- )
- .unwrap();
- let nullifier1 = 30_u32.to_biguint().unwrap();
- relayer_merkle_tree
- .append(&nullifier1, &mut relayer_indexing_array)
- .unwrap();
- // indexed array:
- // element: 0
- // value: 0
- // next_value: 30
- // index: 0
- // element: 1
- // value: 30
- // next_value: 0
- // index: 1
- // merkle tree:
- // leaf index: 0 = H(0, 1, 30) //Hash(value, next_index, next_value)
- // leaf index: 1 = H(30, 0, 0)
- let indexed_array_element_0 = relayer_indexing_array.get(0).unwrap();
- assert_eq!(indexed_array_element_0.value,
0_u32.to_biguint().unwrap()); - assert_eq!(indexed_array_element_0.next_index, 1); - assert_eq!(indexed_array_element_0.index, 0); - let indexed_array_element_1 = relayer_indexing_array.get(1).unwrap(); - assert_eq!(indexed_array_element_1.value, 30_u32.to_biguint().unwrap()); - assert_eq!(indexed_array_element_1.next_index, 0); - assert_eq!(indexed_array_element_1.index, 1); - - let leaf_0 = relayer_merkle_tree.merkle_tree.leaf(0); - let leaf_1 = relayer_merkle_tree.merkle_tree.leaf(1); - assert_eq!( - leaf_0, - Poseidon::hashv(&[ - &0_u32.to_biguint().unwrap().to_bytes_be(), - &1_u32.to_biguint().unwrap().to_bytes_be(), - &30_u32.to_biguint().unwrap().to_bytes_be() - ]) - .unwrap() - ); - assert_eq!( - leaf_1, - Poseidon::hashv(&[ - &30_u32.to_biguint().unwrap().to_bytes_be(), - &0_u32.to_biguint().unwrap().to_bytes_be(), - &0_u32.to_biguint().unwrap().to_bytes_be() - ]) - .unwrap() - ); - - let non_inclusion_proof = relayer_merkle_tree - .get_non_inclusion_proof(&10_u32.to_biguint().unwrap(), &relayer_indexing_array) - .unwrap(); - assert_eq!(non_inclusion_proof.root, relayer_merkle_tree.root()); - assert_eq!( - non_inclusion_proof.value, - bigint_to_be_bytes_array::<32>(&10_u32.to_biguint().unwrap()).unwrap() - ); - assert_eq!(non_inclusion_proof.leaf_lower_range_value, [0; 32]); - assert_eq!( - non_inclusion_proof.leaf_higher_range_value, - bigint_to_be_bytes_array::<32>(&30_u32.to_biguint().unwrap()).unwrap() - ); - assert_eq!(non_inclusion_proof.leaf_index, 0); - - relayer_merkle_tree - .verify_non_inclusion_proof(&non_inclusion_proof) - .unwrap(); -} - -/// Performs conflicting Merkle tree updates where: -/// -/// 1. Party one inserts 30. -/// 2. Party two inserts 10. -/// -/// In this case, party two needs to update: -/// -/// * The inserted element (10) to point to 30 as the next one. -#[test] -fn functional_changelog_test_1() { - let address_1 = 30_u32.to_biguint().unwrap(); - let address_2 = 10_u32.to_biguint().unwrap(); - let address_3 = 11_u32.to_biguint().unwrap(); - const HEIGHT: usize = 10; - perform_change_log_test::(&[ - address_1, address_2, address_3, - ]); -} - -/// Performs conflicting Merkle tree updates where: -/// -/// 1. Party one inserts 10. -/// 2. Party two inserts 30. -/// -/// In this case, party two needs to update: -/// -/// * The low element from 0 to 10. -#[test] -fn functional_changelog_test_2() { - let address_1 = 10_u32.to_biguint().unwrap(); - let address_2 = 30_u32.to_biguint().unwrap(); - const HEIGHT: usize = 10; - - perform_change_log_test::(&[address_1, address_2]); -} - -/// Performs conflicting Merkle tree updates where: -/// -/// 1. Party one inserts 30. -/// 2. Party two inserts 10. -/// 3. Party three inserts 20. -/// -/// In this case: -/// -/// * Party one: -/// * Updates the inserted element (10) to point to 30 as the next one. -/// * Party two: -/// * Updates the low element from 0 to 10. -#[test] -fn functional_changelog_test_3() { - let address_1 = 30_u32.to_biguint().unwrap(); - let address_2 = 10_u32.to_biguint().unwrap(); - let address_3 = 20_u32.to_biguint().unwrap(); - const HEIGHT: usize = 10; - - perform_change_log_test::(&[ - address_1, address_2, address_3, - ]); -} - -/// Performs conflicting Merkle tree updates where two parties try to insert -/// the same element. 
-#[test]
-fn functional_changelog_test_double_spend() {
- let address = 10_u32.to_biguint().unwrap();
- const HEIGHT: usize = 10;
-
- perform_change_log_test::(&[
- address.clone(),
- address.clone(),
- ]);
-}
-
-#[test]
-fn functional_changelog_test_random_8_512_512_0_512() {
- const HEIGHT: usize = 8;
- const CHANGELOG: usize = 512;
- const ROOTS: usize = 512;
- const CANOPY: usize = 0;
- const INDEXED_CHANGELOG: usize = 512;
- const N_OPERATIONS: usize = (1 << HEIGHT) / 2;
- const NET_HEIGHT: usize = HEIGHT - CANOPY;
-
- functional_changelog_test_random::<
- false,
- HEIGHT,
- CHANGELOG,
- ROOTS,
- CANOPY,
- INDEXED_CHANGELOG,
- N_OPERATIONS,
- NET_HEIGHT,
- >()
-}
-
-/// Performs concurrent updates, where the indexed changelog eventually wraps
-/// around. Updates with an old proof and old changelog index are expected to
-/// fail.
-#[test]
-fn functional_changelog_test_random_wrap_around_8_128_512_0_512() {
- const HEIGHT: usize = 8;
- const CHANGELOG: usize = 512;
- const ROOTS: usize = 512;
- const CANOPY: usize = 0;
- const INDEXED_CHANGELOG: usize = 128;
- const N_OPERATIONS: usize = (1 << HEIGHT) / 2;
- const NET_HEIGHT: usize = HEIGHT - CANOPY;
- for _ in 0..100 {
- functional_changelog_test_random::<
- true,
- HEIGHT,
- CHANGELOG,
- ROOTS,
- CANOPY,
- INDEXED_CHANGELOG,
- N_OPERATIONS,
- NET_HEIGHT,
- >()
- }
-}
-
-/// Performs `N_OPERATIONS` concurrent updates with random elements, all of them
-/// without updating the changelog indices. All of them should result in using
-/// the indexed changelog to patch the proof.
-fn functional_changelog_test_random<
- const WRAP_AROUND: bool,
- const HEIGHT: usize,
- const CHANGELOG: usize,
- const ROOTS: usize,
- const CANOPY: usize,
- const INDEXED_CHANGELOG: usize,
- const N_OPERATIONS: usize,
- const NET_HEIGHT: usize,
->() {
- let mut rng = thread_rng();
-
- let leaves: Vec = (0..N_OPERATIONS).map(|_| rng.gen_biguint(248)).collect();
- perform_change_log_test::<
- false,
- WRAP_AROUND,
- HEIGHT,
- CHANGELOG,
- ROOTS,
- CANOPY,
- INDEXED_CHANGELOG,
- NET_HEIGHT,
- >(&leaves);
-}
-
-/// Performs conflicting Merkle tree updates where multiple actors try to add
-/// new ranges while using the same (for most of the actors, outdated)
-/// Merkle proofs and changelog indices.
-///
-/// Scenario:
-///
-/// 1. Two parties start with the same indexed array state.
-/// 2. Both parties compute their values with the same indexed Merkle tree
-/// state.
-/// 3. Party one inserts first.
-/// 4. Party two needs to patch the low element, because the low element has
-/// changed.
-/// 5. Party two inserts.
-/// 6. Party N needs to patch the low element, because the low element has
-/// changed.
-/// 7. Party N inserts.
-///
-/// `DOUBLE_SPEND` indicates whether the provided addresses are an attempt to
-/// double-spend by the subsequent parties. When set to `true`, we expect
-/// subsequent updates to fail.
-fn perform_change_log_test<
- const DOUBLE_SPEND: bool,
- const WRAP_AROUND: bool,
- const HEIGHT: usize,
- const CHANGELOG: usize,
- const ROOTS: usize,
- const CANOPY: usize,
- const INDEXED_CHANGELOG: usize,
- const NET_HEIGHT: usize,
->(
- addresses: &[BigUint],
-) {
- // Initialize the trees and indexed array.
- let mut relayer_indexed_array = IndexedArray::::default(); - relayer_indexed_array.init().unwrap(); - let mut relayer_merkle_tree = - reference::IndexedMerkleTree::::new(HEIGHT, CANOPY).unwrap(); - let mut onchain_indexed_merkle_tree = - IndexedMerkleTree::::new( - HEIGHT, - CHANGELOG, - ROOTS, - CANOPY, - INDEXED_CHANGELOG, - ) - .unwrap(); - onchain_indexed_merkle_tree.init().unwrap(); - onchain_indexed_merkle_tree.add_highest_element().unwrap(); - relayer_merkle_tree.init().unwrap(); - assert_eq!( - relayer_merkle_tree.root(), - onchain_indexed_merkle_tree.root(), - "environment setup failed relayer and onchain indexed Merkle tree roots are inconsistent" - ); - - // Perform updates for each actor, where every of them is using the same - // changelog indices, generating a conflict which needs to be solved by - // patching from changelog. - let mut indexed_arrays = vec![relayer_indexed_array.clone(); addresses.len()]; - let changelog_index = onchain_indexed_merkle_tree.changelog_index(); - let indexed_changelog_index = onchain_indexed_merkle_tree.indexed_changelog_index(); - for (i, (address, indexed_array)) in addresses.iter().zip(indexed_arrays.iter_mut()).enumerate() - { - let (old_low_address, old_low_address_next_value) = indexed_array - .find_low_element_for_nonexistent(address) - .unwrap(); - let address_bundle = indexed_array - .new_element_with_low_element_index(old_low_address.index, address) - .unwrap(); - - let mut low_element_proof = relayer_merkle_tree - .get_proof_of_leaf(old_low_address.index, false) - .unwrap(); - - if DOUBLE_SPEND && i > 0 { - let res = onchain_indexed_merkle_tree.update( - changelog_index, - indexed_changelog_index, - address_bundle.new_element.value, - old_low_address, - old_low_address_next_value, - &mut low_element_proof, - ); - assert!(matches!( - res, - Err(IndexedMerkleTreeError::NewElementGreaterOrEqualToNextElement) - )); - } else if WRAP_AROUND && (i + 1) * 2 > INDEXED_CHANGELOG { - // After a wrap-around of the indexed changelog, we expect leaf - // updates to break immediately. - let res = onchain_indexed_merkle_tree.update( - changelog_index, - indexed_changelog_index, - address_bundle.new_element.value.clone(), - old_low_address.clone(), - old_low_address_next_value, - &mut low_element_proof, - ); - println!("changelog_index {:?}", changelog_index); - println!("indexed_changelog_index {:?}", indexed_changelog_index); - println!( - "address_bundle new_element_next_value{:?}", - address_bundle.new_element_next_value - ); - println!( - "address_bundle new_element {:?}", - address_bundle.new_element - ); - - println!("old_low_address {:?}", old_low_address); - println!("res {:?}", res); - assert!(matches!( - res, - Err(IndexedMerkleTreeError::ConcurrentMerkleTree( - ConcurrentMerkleTreeError::CannotUpdateLeaf - )) - )); - } else { - onchain_indexed_merkle_tree - .update( - changelog_index, - indexed_changelog_index, - address_bundle.new_element.value, - old_low_address, - old_low_address_next_value, - &mut low_element_proof, - ) - .unwrap(); - for i in onchain_indexed_merkle_tree.changelog.iter() { - println!("indexed array state element {:?} ", i); - } - } - } -} diff --git a/scripts/lint.sh b/scripts/lint.sh index 0f38685..c0639e4 100755 --- a/scripts/lint.sh +++ b/scripts/lint.sh @@ -2,10 +2,10 @@ set -e -for dir in concurrent bounded-vec hash-set indexed; do +for dir in bounded-vec; do cd $dir cargo fmt -- --check cargo clippy --all-targets -- -D warnings cargo test --all-targets cd .. -done \ No newline at end of file +done