From 9e5a88f9c71f862470fdec748488e60a6bffea1f Mon Sep 17 00:00:00 2001 From: vertose Date: Tue, 3 May 2022 05:56:42 -0400 Subject: [PATCH 1/4] defining transaction models (cherry picked from commit 4d25ff2bf17b4bd4a077fe215b2d1486d2bfd770) --- api/src/types.rs | 2 +- chain/src/chain.rs | 2 +- chain/src/txhashset/utxo_view.rs | 2 +- chain/tests/nrd_validation_rules.rs | 20 ++- core/src/core/transaction.rs | 182 +++++++++++++++++++---- core/src/genesis.rs | 24 +-- core/src/libtx/aggsig.rs | 11 +- core/src/libtx/build.rs | 16 +- core/src/libtx/reward.rs | 8 +- core/tests/block.rs | 3 +- core/tests/core.rs | 10 +- core/tests/transaction.rs | 2 +- core/tests/verifier_cache.rs | 2 +- doc/mwc-noninteractive_transactions.md | 26 ++++ etc/gen_gen/src/bin/gen_gen.rs | 2 +- pool/tests/nrd_kernel_relative_height.rs | 26 +++- 16 files changed, 263 insertions(+), 75 deletions(-) create mode 100644 doc/mwc-noninteractive_transactions.md diff --git a/api/src/types.rs b/api/src/types.rs index 15a8002469..533eedfedb 100644 --- a/api/src/types.rs +++ b/api/src/types.rs @@ -551,7 +551,7 @@ impl TxKernelPrintable { fee, lock_height, excess: k.excess.to_hex(), - excess_sig: (&k.excess_sig.to_raw_data()[..]).to_hex(), + excess_sig: (&k.excess_sig().to_raw_data()[..]).to_hex(), } } } diff --git a/chain/src/chain.rs b/chain/src/chain.rs index 05e653f0b9..0b85c43f63 100644 --- a/chain/src/chain.rs +++ b/chain/src/chain.rs @@ -1439,7 +1439,7 @@ impl Chain { } let mut output_vec: Vec = vec![]; for (ref x, &y) in outputs.1.iter().zip(rangeproofs.1.iter()) { - output_vec.push(Output::new(x.features, x.commitment(), y)); + output_vec.push(Output::new_interactive(x.features, x.commitment(), y)); } Ok((outputs.0, last_index, output_vec)) } diff --git a/chain/src/txhashset/utxo_view.rs b/chain/src/txhashset/utxo_view.rs index 4ed2a8422a..14ce7e7b3d 100644 --- a/chain/src/txhashset/utxo_view.rs +++ b/chain/src/txhashset/utxo_view.rs @@ -155,7 +155,7 @@ impl<'a> UTXOView<'a> { pub fn get_unspent_output_at(&self, pos: u64) -> Result { match self.output_pmmr.get_data(pos) { Some(output_id) => match self.rproof_pmmr.get_data(pos) { - Some(rproof) => Ok(output_id.into_output(rproof)), + Some(rproof) => Ok(output_id.into_output(rproof, None)), None => Err(ErrorKind::RangeproofNotFound(format!("at position {}", pos)).into()), }, None => Err(ErrorKind::OutputNotFound(format!("at position {}", pos)).into()), diff --git a/chain/tests/nrd_validation_rules.rs b/chain/tests/nrd_validation_rules.rs index c52d357c9e..7e91632b20 100644 --- a/chain/tests/nrd_validation_rules.rs +++ b/chain/tests/nrd_validation_rules.rs @@ -22,7 +22,7 @@ use grin_util as util; use self::chain_test_helper::{clean_output_dir, genesis_block, init_chain}; use crate::chain::{Chain, Error, Options}; use crate::core::core::{ - Block, BlockHeader, KernelFeatures, NRDRelativeHeight, Transaction, TxKernel, + Block, BlockHeader, KernelFeatures, KernelProof, NRDRelativeHeight, Transaction, TxKernel, }; use crate::core::libtx::{aggsig, build, reward, ProofBuilder}; use crate::core::{consensus, global, pow}; @@ -119,8 +119,10 @@ fn process_block_nrd_validation() -> Result<(), Error> { let skey = excess.secret_key().unwrap(); kernel.excess = keychain.secp().commit(0, skey).unwrap(); let pubkey = &kernel.excess.to_pubkey().unwrap(); - kernel.excess_sig = - aggsig::sign_with_blinding(&keychain.secp(), &msg, &excess, Some(&pubkey)).unwrap(); + kernel.proof = KernelProof::Interactive { + excess_sig: aggsig::sign_with_blinding(&keychain.secp(), &msg, &excess, Some(&pubkey)) + 
.unwrap(), + }; kernel.verify().unwrap(); let key_id1 = ExtKeychainPath::new(1, 1, 0, 0, 0).to_identifier(); @@ -235,8 +237,10 @@ fn process_block_nrd_validation_relative_height_1() -> Result<(), Error> { let skey = excess.secret_key().unwrap(); kernel.excess = keychain.secp().commit(0, skey).unwrap(); let pubkey = &kernel.excess.to_pubkey().unwrap(); - kernel.excess_sig = - aggsig::sign_with_blinding(&keychain.secp(), &msg, &excess, Some(&pubkey)).unwrap(); + kernel.proof = KernelProof::Interactive { + excess_sig: aggsig::sign_with_blinding(&keychain.secp(), &msg, &excess, Some(&pubkey)) + .unwrap(), + }; kernel.verify().unwrap(); let key_id1 = ExtKeychainPath::new(1, 1, 0, 0, 0).to_identifier(); @@ -334,8 +338,10 @@ fn process_block_nrd_validation_fork() -> Result<(), Error> { let skey = excess.secret_key().unwrap(); kernel.excess = keychain.secp().commit(0, skey).unwrap(); let pubkey = &kernel.excess.to_pubkey().unwrap(); - kernel.excess_sig = - aggsig::sign_with_blinding(&keychain.secp(), &msg, &excess, Some(&pubkey)).unwrap(); + kernel.proof = KernelProof::Interactive { + excess_sig: aggsig::sign_with_blinding(&keychain.secp(), &msg, &excess, Some(&pubkey)) + .unwrap(), + }; kernel.verify().unwrap(); let key_id1 = ExtKeychainPath::new(1, 1, 0, 0, 0).to_identifier(); diff --git a/core/src/core/transaction.rs b/core/src/core/transaction.rs index e4afbcf1f3..379b3f1323 100644 --- a/core/src/core/transaction.rs +++ b/core/src/core/transaction.rs @@ -14,7 +14,7 @@ //! Transactions -use crate::core::hash::{DefaultHashable, Hashed}; +use crate::core::hash::{DefaultHashable, Hash, Hashed}; use crate::core::verifier_cache::VerifierCache; use crate::core::{committed, Committed}; use crate::libtx::{aggsig, secp_ser}; @@ -456,6 +456,32 @@ impl From for Error { } } +/// A proof of possession of E and X (if present) +#[derive(Serialize, Deserialize, Debug, Clone, Copy)] +#[serde(untagged)] +pub enum KernelProof { + /// Traditional interactive kernels do not contain a stealth excess, + /// so its proof consists only of a simple schnorr signature for E. + Interactive { + /// The signature proving the excess is a valid public key, which signs + /// the features. + #[serde(with = "secp_ser::sig_serde")] + excess_sig: secp::Signature, + }, + /// Kernels for non-interactive transactions include an additional "stealth" excess, + /// and their signatures are 2-key "batch" signatures proving knowledge of E and X. + NonInteractive { + /// The stealth excess X. + #[serde( + serialize_with = "secp_ser::as_hex", + deserialize_with = "secp_ser::commitment_from_hex" + )] + stealth_excess: Commitment, + /// The "batch" signature proving knowledge of the excess and stealth excess. + signature: aggsig::BatchSignature, + }, +} + /// A proof that a transaction sums to zero. Includes both the transaction's /// Pedersen commitment and the signature, that guarantees that the commitments /// amount to zero. @@ -473,10 +499,9 @@ pub struct TxKernel { deserialize_with = "secp_ser::commitment_from_hex" )] pub excess: Commitment, - /// The signature proving the excess is a valid public key, which signs - /// the transaction fee. - #[serde(with = "secp_ser::sig_serde")] - pub excess_sig: secp::Signature, + /// The proof of knowledge of the private excess and optional stealth excess. 
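+	/// For interactive kernels this is just the schnorr `excess_sig` over the
+	/// kernel message; non-interactive kernels additionally carry the stealth
+	/// excess and a 2-key "batch" signature (see `KernelProof` above).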
+ #[serde(flatten)] + pub proof: KernelProof, } impl DefaultHashable for TxKernel {} @@ -496,7 +521,7 @@ impl Writeable for TxKernel { fn write(&self, writer: &mut W) -> Result<(), ser::Error> { self.features.write(writer)?; self.excess.write(writer)?; - self.excess_sig.write(writer)?; + self.excess_sig().write(writer)?; Ok(()) } } @@ -506,7 +531,9 @@ impl Readable for TxKernel { Ok(TxKernel { features: KernelFeatures::read(reader)?, excess: Commitment::read(reader)?, - excess_sig: secp::Signature::read(reader)?, + proof: KernelProof::Interactive { + excess_sig: secp::Signature::read(reader)?, + }, }) } } @@ -585,6 +612,16 @@ impl TxKernel { self.excess } + pub fn excess_sig(&self) -> secp::Signature { + match self.proof { + KernelProof::Interactive { excess_sig } => excess_sig, + KernelProof::NonInteractive { + stealth_excess, + signature, + } => signature.get(), + } + } + /// The msg signed as part of the tx kernel. /// Based on kernel features and associated fields (fee and lock_height). pub fn msg_to_sign(&self) -> Result { @@ -598,7 +635,7 @@ impl TxKernel { pub fn verify(&self) -> Result<(), Error> { let secp = static_secp_instance(); let secp = secp.lock(); - let sig = &self.excess_sig; + let sig = &self.excess_sig(); // Verify aggsig directly in libsecp let pubkey = &self.excess.to_pubkey()?; if !aggsig::verify_single( @@ -626,7 +663,7 @@ impl TxKernel { let secp = secp.lock(); for tx_kernel in tx_kernels { - sigs.push(tx_kernel.excess_sig); + sigs.push(tx_kernel.excess_sig()); pubkeys.push(tx_kernel.excess.to_pubkey()?); msgs.push(tx_kernel.msg_to_sign()?); } @@ -638,6 +675,18 @@ impl TxKernel { Ok(()) } + pub fn new_interactive( + features: KernelFeatures, + excess: Commitment, + excess_sig: secp::Signature, + ) -> TxKernel { + TxKernel { + features, + excess: excess, + proof: KernelProof::Interactive { excess_sig }, + } + } + /// Build an empty tx kernel with zero values. pub fn empty() -> TxKernel { TxKernel::with_features(KernelFeatures::Plain { fee: 0 }) @@ -648,7 +697,9 @@ impl TxKernel { TxKernel { features, excess: Commitment::from_vec(vec![0; 33]), - excess_sig: secp::Signature::from_raw_data(&[0; 64]).unwrap(), + proof: KernelProof::Interactive { + excess_sig: secp::Signature::from_raw_data(&[0; 64]).unwrap(), + }, } } } @@ -1554,6 +1605,43 @@ pub fn deaggregate(mk_tx: Transaction, txs: &[Transaction]) -> Result Self { + InputProof::Interactive + } +} + +impl Writeable for InputProof { + fn write(&self, writer: &mut W) -> Result<(), ser::Error> { + Ok(()) + } +} + +impl Readable for InputProof { + fn read(reader: &mut R) -> Result { + Ok(InputProof::Interactive) + } +} + /// A transaction input. /// /// Primarily a reference to an output being spent by the transaction. @@ -1568,6 +1656,9 @@ pub struct Input { deserialize_with = "secp_ser::commitment_from_hex" )] pub commit: Commitment, + /// The input type: Interactive or non-interactive. 
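+	/// Interactive inputs carry no additional data, so this field serializes
+	/// to nothing (see the empty `Writeable`/`Readable` impls for `InputProof`).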
+ #[serde(default)] + pub proof: InputProof, } impl DefaultHashable for Input {} @@ -1584,6 +1675,7 @@ impl From<&OutputIdentifier> for Input { Input { features: out.features, commit: out.commit, + proof: InputProof::Interactive, } } } @@ -1594,6 +1686,7 @@ impl Writeable for Input { fn write(&self, writer: &mut W) -> Result<(), ser::Error> { self.features.write(writer)?; self.commit.write(writer)?; + self.proof.write(writer)?; Ok(()) } } @@ -1604,7 +1697,8 @@ impl Readable for Input { fn read(reader: &mut R) -> Result { let features = OutputFeatures::read(reader)?; let commit = Commitment::read(reader)?; - Ok(Input::new(features, commit)) + let proof = InputProof::read(reader)?; + Ok(Input::new(features, commit, proof)) } } @@ -1615,8 +1709,12 @@ impl Readable for Input { impl Input { /// Build a new input from the data required to identify and verify an /// output being spent. - pub fn new(features: OutputFeatures, commit: Commitment) -> Input { - Input { features, commit } + pub fn new(features: OutputFeatures, commit: Commitment, proof: InputProof) -> Input { + Input { + features, + commit, + proof, + } } /// The input commitment which _partially_ identifies the output being @@ -1756,6 +1854,7 @@ impl From<&[OutputIdentifier]> for Inputs { .map(|out| Input { features: out.features, commit: out.commit, + proof: InputProof::Interactive, }) .collect(); inputs.sort_unstable(); @@ -1875,6 +1974,19 @@ impl Readable for OutputFeatures { } } +/// Public keys R & P and output signature. +#[derive(Debug, Copy, Clone, Serialize, Deserialize)] +pub struct OutputKeys { + /// The ephemeral key R chosen by the sender. + pub ephemeral_pk: secp::PublicKey, + /// The one-time output public key P. + pub output_pk: secp::PublicKey, + /// The proof of possession ρ for key R. + /// Signs the entire output. + #[serde(with = "secp_ser::sig_serde")] + pub sig: secp::Signature, +} + /// Output for a transaction, defining the new ownership of coins that are being /// transferred. The commitment is a blinded value for the output while the /// range proof guarantees the commitment includes a positive value without @@ -1890,6 +2002,9 @@ pub struct Output { deserialize_with = "secp_ser::rangeproof_from_hex" )] pub proof: RangeProof, + /// Public keys and signature. Only included in non-interactive transactions. + #[serde(flatten)] + pub keys: Option, } impl Ord for Output { @@ -1935,6 +2050,7 @@ impl Readable for Output { Ok(Output { identifier: OutputIdentifier::read(reader)?, proof: RangeProof::read(reader)?, + keys: None, }) } } @@ -1953,10 +2069,15 @@ impl OutputFeatures { impl Output { /// Create a new output with the provided features, commitment and rangeproof. - pub fn new(features: OutputFeatures, commit: Commitment, proof: RangeProof) -> Output { + pub fn new_interactive( + features: OutputFeatures, + commit: Commitment, + proof: RangeProof, + ) -> Output { Output { identifier: OutputIdentifier { features, commit }, proof, + keys: None, } } @@ -2069,10 +2190,11 @@ impl OutputIdentifier { } /// Converts this identifier to a full output, provided a RangeProof - pub fn into_output(self, proof: RangeProof) -> Output { + pub fn into_output(self, proof: RangeProof, keys: Option) -> Output { Output { identifier: self, proof, + keys, } } } @@ -2152,7 +2274,9 @@ mod test { let kernel = TxKernel { features: KernelFeatures::Plain { fee: 10 }, excess: commit, - excess_sig: sig.clone(), + proof: KernelProof::Interactive { + excess_sig: sig.clone(), + }, }; // Test explicit protocol version. 
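For reference, a minimal sketch of how the refactored kernel API above reads at a call site; `commit` and `sig` stand in for an excess commitment and signature built exactly as in the surrounding tests:

	// The signature now lives inside the KernelProof enum rather than in a
	// bare `excess_sig` field, so kernels are built via the new constructor...
	let kernel = TxKernel::new_interactive(KernelFeatures::Plain { fee: 10 }, commit, sig.clone());

	// ...and callers that previously read `kernel.excess_sig` go through the
	// accessor, which extracts the signature from either proof variant.
	assert_eq!(kernel.excess_sig(), sig.clone());

	// Equivalent construction by setting the proof explicitly, as the updated
	// tests and builders in this patch do.
	let mut k = TxKernel::with_features(KernelFeatures::Plain { fee: 10 });
	k.excess = commit;
	k.proof = KernelProof::Interactive { excess_sig: sig.clone() };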
@@ -2162,7 +2286,7 @@ mod test { let kernel2: TxKernel = ser::deserialize(&mut &vec[..], version).unwrap(); assert_eq!(kernel2.features, KernelFeatures::Plain { fee: 10 }); assert_eq!(kernel2.excess, commit); - assert_eq!(kernel2.excess_sig, sig.clone()); + assert_eq!(kernel2.excess_sig(), sig.clone()); } // Test with "default" protocol version. @@ -2171,7 +2295,7 @@ mod test { let kernel2: TxKernel = ser::deserialize_default(&mut &vec[..]).unwrap(); assert_eq!(kernel2.features, KernelFeatures::Plain { fee: 10 }); assert_eq!(kernel2.excess, commit); - assert_eq!(kernel2.excess_sig, sig.clone()); + assert_eq!(kernel2.excess_sig(), sig.clone()); } #[test] @@ -2192,7 +2316,9 @@ mod test { lock_height: 100, }, excess: commit, - excess_sig: sig.clone(), + proof: KernelProof::Interactive { + excess_sig: sig.clone(), + }, }; // Test explicit protocol version. @@ -2202,7 +2328,7 @@ mod test { let kernel2: TxKernel = ser::deserialize(&mut &vec[..], version).unwrap(); assert_eq!(kernel.features, kernel2.features); assert_eq!(kernel2.excess, commit); - assert_eq!(kernel2.excess_sig, sig.clone()); + assert_eq!(kernel2.excess_sig(), sig.clone()); } // Test with "default" protocol version. @@ -2211,7 +2337,7 @@ mod test { let kernel2: TxKernel = ser::deserialize_default(&mut &vec[..]).unwrap(); assert_eq!(kernel.features, kernel2.features); assert_eq!(kernel2.excess, commit); - assert_eq!(kernel2.excess_sig, sig.clone()); + assert_eq!(kernel2.excess_sig(), sig.clone()); } #[test] @@ -2234,7 +2360,9 @@ mod test { relative_height: NRDRelativeHeight(100), }, excess: commit, - excess_sig: sig.clone(), + proof: KernelProof::Interactive { + excess_sig: sig.clone(), + }, }; // Test explicit protocol version. @@ -2244,7 +2372,7 @@ mod test { let kernel2: TxKernel = ser::deserialize(&mut &vec[..], version).unwrap(); assert_eq!(kernel.features, kernel2.features); assert_eq!(kernel2.excess, commit); - assert_eq!(kernel2.excess_sig, sig.clone()); + assert_eq!(kernel2.excess_sig(), sig.clone()); } // Test with "default" protocol version. @@ -2253,7 +2381,7 @@ mod test { let kernel2: TxKernel = ser::deserialize_default(&mut &vec[..]).unwrap(); assert_eq!(kernel.features, kernel2.features); assert_eq!(kernel2.excess, commit); - assert_eq!(kernel2.excess_sig, sig.clone()); + assert_eq!(kernel2.excess_sig(), sig.clone()); } #[test] @@ -2281,7 +2409,9 @@ mod test { aggsig::sign_single(&keychain.secp(), &msg, &skey, None, Some(&pubkey)).unwrap(); kernel.excess = excess; - kernel.excess_sig = excess_sig; + kernel.proof = KernelProof::Interactive { + excess_sig: excess_sig, + }; // Check the signature verifies. 
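+	// verify() extracts the signature again via excess_sig() before checking
+	// it against the excess pubkey, so the Interactive wrapper round-trips.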
assert_eq!(kernel.verify(), Ok(())); @@ -2340,6 +2470,7 @@ mod test { let input = Input { features: OutputFeatures::Plain, commit, + proof: InputProof::Interactive, }; let block_hash = @@ -2356,6 +2487,7 @@ mod test { let input = Input { features: OutputFeatures::Coinbase, commit, + proof: InputProof::Interactive, }; let short_id = input.short_id(&block_hash, nonce); diff --git a/core/src/genesis.rs b/core/src/genesis.rs index 91011c675e..081bf4e8cd 100644 --- a/core/src/genesis.rs +++ b/core/src/genesis.rs @@ -89,21 +89,21 @@ pub fn genesis_floo() -> core::Block { }, ..Default::default() }); - let kernel = core::TxKernel { - features: core::KernelFeatures::Coinbase, - excess: Commitment::from_vec( + let kernel = core::TxKernel::new_interactive( + core::KernelFeatures::Coinbase, + Commitment::from_vec( util::from_hex("093d0aeae5f6aab0975096fde31e1a21fa42edfc93db318a1064156ace81f54671") .unwrap(), ), - excess_sig: Signature::from_raw_data(&[ + Signature::from_raw_data(&[ 206, 29, 151, 239, 47, 44, 219, 103, 100, 240, 76, 52, 231, 174, 149, 129, 237, 164, 234, 60, 232, 149, 90, 94, 161, 93, 131, 148, 120, 81, 161, 155, 170, 177, 250, 64, 66, 25, 44, 82, 164, 227, 150, 5, 10, 166, 52, 150, 22, 179, 15, 50, 81, 15, 114, 9, 52, 239, 234, 80, 82, 118, 146, 30, ]) .unwrap(), - }; - let output = core::Output::new( + ); + let output = core::Output::new_interactive( core::OutputFeatures::Coinbase, Commitment::from_vec( util::from_hex("0905a2ebf3913c7d378660a7b60e6bda983be451cb1de8779ad0f51f4d2fb079ea") @@ -202,21 +202,21 @@ pub fn genesis_main() -> core::Block { }, ..Default::default() }); - let kernel = core::TxKernel { - features: core::KernelFeatures::Coinbase, - excess: Commitment::from_vec( + let kernel = core::TxKernel::new_interactive( + core::KernelFeatures::Coinbase, + Commitment::from_vec( util::from_hex("08b659fde3a41284819f45415890330272efef7ef991f833a64b746be802c8fd77") .unwrap(), ), - excess_sig: Signature::from_raw_data(&[ + Signature::from_raw_data(&[ 189, 52, 60, 137, 172, 160, 134, 69, 17, 47, 82, 86, 169, 136, 4, 240, 104, 188, 8, 185, 90, 170, 220, 90, 88, 177, 222, 171, 198, 244, 149, 15, 238, 91, 152, 234, 248, 34, 72, 175, 213, 52, 179, 29, 165, 113, 70, 167, 30, 159, 163, 45, 67, 2, 136, 169, 248, 200, 90, 86, 70, 192, 73, 37, ]) .unwrap(), - }; - let output = core::Output::new( + ); + let output = core::Output::new_interactive( core::OutputFeatures::Coinbase, Commitment::from_vec( util::from_hex("089dfcac475c94c978861b3dbef1e37b038cc13f9f78de9a4e14f31ed36e7a54c9") diff --git a/core/src/libtx/aggsig.rs b/core/src/libtx/aggsig.rs index 8595b9fd0c..73131c40e5 100644 --- a/core/src/libtx/aggsig.rs +++ b/core/src/libtx/aggsig.rs @@ -238,7 +238,7 @@ pub fn verify_partial_sig( /// let commit = keychain.commit(value, &key_id, switch).unwrap(); /// let builder = proof::ProofBuilder::new(&keychain); /// let proof = proof::create(&keychain, &builder, value, &key_id, switch, commit, None).unwrap(); -/// let output = Output::new(OutputFeatures::Coinbase, commit, proof); +/// let output = Output::new_interactive(OutputFeatures::Coinbase, commit, proof); /// let height = 20; /// let over_commit = secp.commit_value(reward(fees, height)).unwrap(); /// let out_commit = output.commitment(); @@ -303,7 +303,7 @@ where /// let commit = keychain.commit(value, &key_id, switch).unwrap(); /// let builder = proof::ProofBuilder::new(&keychain); /// let proof = proof::create(&keychain, &builder, value, &key_id, switch, commit, None).unwrap(); -/// let output = Output::new(OutputFeatures::Coinbase, 
commit, proof); +/// let output = Output::new_interactive(OutputFeatures::Coinbase, commit, proof); /// let height = 20; /// let over_commit = secp.commit_value(reward(fees, height)).unwrap(); /// let out_commit = output.commitment(); @@ -461,8 +461,15 @@ pub fn sign_with_blinding( } /// A dual-key "batch" Schnorr signature. +#[derive(Serialize, Deserialize, Copy, Clone, PartialEq, Eq, Debug)] pub struct BatchSignature(Signature); +impl BatchSignature { + pub fn get(&self) -> Signature { + self.0 + } +} + /// Creates a "batch" Schnorr signature for two secret keys (sk1, sk2) /// These are nothing more than regular schnorr signatures using a single /// key (sk) that's calculated from sk1 and sk2 using the formula: diff --git a/core/src/libtx/build.rs b/core/src/libtx/build.rs index a827eb9083..a63678f13e 100644 --- a/core/src/libtx/build.rs +++ b/core/src/libtx/build.rs @@ -31,7 +31,9 @@ //! ] //! ) -use crate::core::{Input, KernelFeatures, Output, OutputFeatures, Transaction, TxKernel}; +use crate::core::{ + Input, InputProof, KernelFeatures, KernelProof, Output, OutputFeatures, Transaction, TxKernel, +}; use crate::libtx::proof::{self, ProofBuild}; use crate::libtx::{aggsig, Error}; use keychain::{BlindSum, BlindingFactor, Identifier, Keychain, SwitchCommitmentType}; @@ -71,7 +73,7 @@ where .keychain .commit(value, &key_id, SwitchCommitmentType::Regular)?; // TODO: proper support for different switch commitment schemes - let input = Input::new(features, commit); + let input = Input::new(features, commit, InputProof::Interactive); Ok(( tx.with_input(input), sum.sub_key_id(key_id.to_value_path(value)), @@ -136,7 +138,11 @@ where )?; Ok(( - tx.with_output(Output::new(OutputFeatures::Plain, commit, proof)), + tx.with_output(Output::new_interactive( + OutputFeatures::Plain, + commit, + proof, + )), sum.add_key_id(key_id.to_value_path(value)), )) }, @@ -217,7 +223,9 @@ where let skey = excess.secret_key()?; kernel.excess = keychain.secp().commit(0, skey)?; let pubkey = &kernel.excess.to_pubkey()?; - kernel.excess_sig = aggsig::sign_with_blinding(&keychain.secp(), &msg, &excess, Some(&pubkey))?; + kernel.proof = KernelProof::Interactive { + excess_sig: aggsig::sign_with_blinding(&keychain.secp(), &msg, &excess, Some(&pubkey))?, + }; kernel.verify()?; transaction_with_kernel(elems, kernel, excess, keychain, builder) } diff --git a/core/src/libtx/reward.rs b/core/src/libtx/reward.rs index 1be313749b..a127539dee 100644 --- a/core/src/libtx/reward.rs +++ b/core/src/libtx/reward.rs @@ -46,7 +46,7 @@ where let proof = proof::create(keychain, builder, value, key_id, switch, commit, None)?; - let output = Output::new(OutputFeatures::Coinbase, commit, proof); + let output = Output::new_interactive(OutputFeatures::Coinbase, commit, proof); let secp = static_secp_instance(); let secp = secp.lock(); @@ -75,10 +75,6 @@ where } }; - let kernel = TxKernel { - features: KernelFeatures::Coinbase, - excess, - excess_sig: sig, - }; + let kernel = TxKernel::new_interactive(KernelFeatures::Coinbase, excess, sig); Ok((output, kernel)) } diff --git a/core/tests/block.rs b/core/tests/block.rs index 2be6d1bf63..9f4292dbd5 100644 --- a/core/tests/block.rs +++ b/core/tests/block.rs @@ -346,7 +346,8 @@ fn remove_coinbase_output_flag() { let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0); let b = new_block(&[], &keychain, &builder, &prev, &key_id); let output = b.outputs()[0]; - let output = Output::new(OutputFeatures::Plain, output.commitment(), output.proof()); + let output = + 
Output::new_interactive(OutputFeatures::Plain, output.commitment(), output.proof()); let b = Block { body: b.body.replace_outputs(&[output]), ..b diff --git a/core/tests/core.rs b/core/tests/core.rs index 6a5680c46b..3546b354da 100644 --- a/core/tests/core.rs +++ b/core/tests/core.rs @@ -21,8 +21,8 @@ use self::core::core::block::Error::KernelLockHeight; use self::core::core::hash::{Hashed, ZERO_HASH}; use self::core::core::verifier_cache::{LruVerifierCache, VerifierCache}; use self::core::core::{ - aggregate, deaggregate, KernelFeatures, Output, OutputFeatures, OutputIdentifier, Transaction, - TxKernel, Weighting, + aggregate, deaggregate, KernelFeatures, KernelProof, Output, OutputFeatures, OutputIdentifier, + Transaction, TxKernel, Weighting, }; use self::core::libtx::build::{self, initial_tx, input, output, with_excess}; use self::core::libtx::{aggsig, ProofBuilder}; @@ -202,8 +202,10 @@ fn build_two_half_kernels() { let skey = excess.secret_key().unwrap(); kernel.excess = keychain.secp().commit(0, skey).unwrap(); let pubkey = &kernel.excess.to_pubkey().unwrap(); - kernel.excess_sig = - aggsig::sign_with_blinding(&keychain.secp(), &msg, &excess, Some(&pubkey)).unwrap(); + kernel.proof = KernelProof::Interactive { + excess_sig: aggsig::sign_with_blinding(&keychain.secp(), &msg, &excess, Some(&pubkey)) + .unwrap(), + }; kernel.verify().unwrap(); let tx1 = build::transaction_with_kernel( diff --git a/core/tests/transaction.rs b/core/tests/transaction.rs index 877d5a02a9..c37f5d852c 100644 --- a/core/tests/transaction.rs +++ b/core/tests/transaction.rs @@ -77,7 +77,7 @@ fn test_output_ser_deser() { let builder = ProofBuilder::new(&keychain); let proof = proof::create(&keychain, &builder, 5, &key_id, switch, commit, None).unwrap(); - let out = Output::new(OutputFeatures::Plain, commit, proof); + let out = Output::new_interactive(OutputFeatures::Plain, commit, proof); let mut vec = vec![]; ser::serialize_default(&mut vec, &out).expect("serialized failed"); diff --git a/core/tests/verifier_cache.rs b/core/tests/verifier_cache.rs index 92bc3763fc..4a73017c87 100644 --- a/core/tests/verifier_cache.rs +++ b/core/tests/verifier_cache.rs @@ -37,7 +37,7 @@ fn test_verifier_cache_rangeproofs() { let builder = proof::ProofBuilder::new(&keychain); let proof = proof::create(&keychain, &builder, 5, &key_id, switch, commit, None).unwrap(); - let out = Output::new(OutputFeatures::Plain, commit, proof); + let out = Output::new_interactive(OutputFeatures::Plain, commit, proof); // Check our output is not verified according to the cache. 
{ diff --git a/doc/mwc-noninteractive_transactions.md b/doc/mwc-noninteractive_transactions.md new file mode 100644 index 0000000000..61df2b7de1 --- /dev/null +++ b/doc/mwc-noninteractive_transactions.md @@ -0,0 +1,26 @@ +An MWC non-interactive transaction consists of: + +* **A list of outputs: tuples of the form out = (Cˆ, πˆ, Rˆ, ρˆ, Pˆ), each implicitly associated to an output address (A, B), composed of:** + + an ephemeral key Rˆ = ˆrG ∈ G, chosen by the sender, which defines two keys as: + (ˆk, qˆ) := H(ˆrA,(A, B)) (note that kˆ and qˆ can be computed from the view key and Rˆ since rˆA = aRˆ) + + a commitment Cˆ := vˆH + qˆG to the coin value vˆ, using randomness qˆ + + a range proof πˆ proving knowledge of an opening (v, q) of Cˆ, with v ∈ [0, vmax] + + a one-time output public key Pˆ ∈ G, computed from kˆ as Pˆ := Bˆ + kG (note that the spend key is required to compute log Pˆ) + + a proof of possession ρˆ of Rˆ with tag Cˆ||πˆ||Pˆ (and possibly a time stamp) +* **A list of inputs of the form (P, D, ψ) where** + + P ∈ G is the one-time public key of the transaction output being spent (each value P is only allowed once in the ledger); + + D ∈ G is the one-time doubling key, chosen by the sender, that “doubles” P + + ψ is a proof of possession of P and D with tag the transaction output being spent +* **The kernel, which is composed of:** + + the supply s ∈ [0, vmax], indicating the amount of money created in the transaction + + the fee f ∈ [0, vmax], indicating the fee paid for the current transaction + + the offset t ∈ ℤp + + the excess E ∈ G, defined as the difference between the commitments in the outputs (including the fee) and the inputs (including the supply), + shifted by the offset. If Ci is the i-th input commitment, that is, the value contained in the output in which Pi appears, then + E := ∑Cˆ + fH − ∑C − sH − tG, + which can be seen as E := E' −tG in terms of the *true excess* E' := ∑Cˆ + fH − ∑C − sH + + a signature σ under E on the empty message ε + + the stealth offset y ∈ ℤp + + the stealth excess X ∈ G, defined as the difference between the ephemeral keys Rˆi from the outputs and the one-time keys Pi from the inputs, shifted by the stealth offset y + X := ∑Rˆ − ∑P − yG + + a proof of possession σ of E and X (with empty tag ε) \ No newline at end of file diff --git a/etc/gen_gen/src/bin/gen_gen.rs b/etc/gen_gen/src/bin/gen_gen.rs index 324891166f..3e3c5a73a9 100644 --- a/etc/gen_gen/src/bin/gen_gen.rs +++ b/etc/gen_gen/src/bin/gen_gen.rs @@ -223,7 +223,7 @@ fn update_genesis_rs(gen: &core::core::Block) { "excess_sig".to_string(), format!( "Signature::from_raw_data(&{:?}).unwrap()", - gen.kernels()[0].excess_sig.to_raw_data().to_vec(), + gen.kernels()[0].excess_sig().to_raw_data().to_vec(), ), )); replacements.push(( diff --git a/pool/tests/nrd_kernel_relative_height.rs b/pool/tests/nrd_kernel_relative_height.rs index 33f156cbde..eb20e98cb2 100644 --- a/pool/tests/nrd_kernel_relative_height.rs +++ b/pool/tests/nrd_kernel_relative_height.rs @@ -17,7 +17,7 @@ pub mod common; use self::core::consensus; use self::core::core::hash::Hashed; use self::core::core::verifier_cache::LruVerifierCache; -use self::core::core::{HeaderVersion, KernelFeatures, NRDRelativeHeight, TxKernel}; +use self::core::core::{HeaderVersion, KernelFeatures, KernelProof, NRDRelativeHeight, TxKernel}; use self::core::global; use self::core::libtx::aggsig; use self::keychain::{BlindingFactor, ExtKeychain, Keychain}; @@ -83,14 +83,18 @@ fn test_nrd_kernel_relative_height() -> Result<(), PoolError> { let 
skey = excess.secret_key().unwrap(); kernel.excess = keychain.secp().commit(0, skey).unwrap(); let pubkey = &kernel.excess.to_pubkey().unwrap(); - kernel.excess_sig = - aggsig::sign_with_blinding(&keychain.secp(), &msg, &excess, Some(&pubkey)).unwrap(); + kernel.proof = KernelProof::Interactive { + excess_sig: aggsig::sign_with_blinding(&keychain.secp(), &msg, &excess, Some(&pubkey)) + .unwrap(), + }; kernel.verify().unwrap(); // Generate a 2nd NRD kernel sharing the same excess commitment but with different signature. let mut kernel2 = kernel.clone(); - kernel2.excess_sig = - aggsig::sign_with_blinding(&keychain.secp(), &msg, &excess, Some(&pubkey)).unwrap(); + kernel2.proof = KernelProof::Interactive { + excess_sig: aggsig::sign_with_blinding(&keychain.secp(), &msg, &excess, Some(&pubkey)) + .unwrap(), + }; kernel2.verify().unwrap(); let tx1 = test_transaction_with_kernel( @@ -116,9 +120,15 @@ fn test_nrd_kernel_relative_height() -> Result<(), PoolError> { }); let msg_short = kernel_short.msg_to_sign().unwrap(); kernel_short.excess = kernel.excess; - kernel_short.excess_sig = - aggsig::sign_with_blinding(&keychain.secp(), &msg_short, &excess, Some(&pubkey)) - .unwrap(); + kernel_short.proof = KernelProof::Interactive { + excess_sig: aggsig::sign_with_blinding( + &keychain.secp(), + &msg_short, + &excess, + Some(&pubkey), + ) + .unwrap(), + }; kernel_short.verify().unwrap(); let tx3 = test_transaction_with_kernel( From 9eb8a363c0c9ab7666516de4cb718da4df0dd719 Mon Sep 17 00:00:00 2001 From: vertose Date: Tue, 24 May 2022 05:24:35 -0400 Subject: [PATCH 2/4] migrate output positions to id-based keys (cherry picked from commit 8db5e4b20a6fd1091f426ca7c5d7549745f1b43f) --- api/src/handlers/blocks_api.rs | 6 +-- api/src/handlers/transactions_api.rs | 17 +++--- api/src/handlers/utils.rs | 3 +- api/src/types.rs | 2 +- chain/src/chain.rs | 34 ++++++++---- chain/src/store.rs | 57 +++++++++++++------- chain/src/txhashset/txhashset.rs | 47 ++++++++--------- chain/src/txhashset/utxo_view.rs | 6 +-- chain/tests/mine_simple_chain.rs | 34 +++--------- core/src/core/transaction.rs | 79 ++++++++++++---------------- core/src/libtx/aggsig.rs | 1 + core/src/libtx/build.rs | 4 +- 12 files changed, 150 insertions(+), 140 deletions(-) diff --git a/api/src/handlers/blocks_api.rs b/api/src/handlers/blocks_api.rs index cf0d391f25..47bceaa38a 100644 --- a/api/src/handlers/blocks_api.rs +++ b/api/src/handlers/blocks_api.rs @@ -69,7 +69,7 @@ impl HeaderHandler { ) } }; - match w(&self.chain)?.get_header_for_output(oid.commitment()) { + match w(&self.chain)?.get_header_for_output(oid.id()) { Ok(header) => Ok(BlockHeaderPrintable::from_header(&header)), Err(e) => Err(ErrorKind::NotFound(format!( "Header for output {}, {}", @@ -114,7 +114,7 @@ impl HeaderHandler { return Err(ErrorKind::NotFound(format!("Output {} not found", commit)).into()) } }; - match w(&self.chain)?.get_header_for_output(oid.commitment()) { + match w(&self.chain)?.get_header_for_output(oid.id()) { Ok(header) => return Ok(header.hash()), Err(e) => { return Err(ErrorKind::NotFound(format!( @@ -230,7 +230,7 @@ impl BlockHandler { Some((_, o)) => o, None => return Err(ErrorKind::NotFound(format!("Output {}", commit)).into()), }; - match w(&self.chain)?.get_header_for_output(oid.commitment()) { + match w(&self.chain)?.get_header_for_output(oid.id()) { Ok(header) => return Ok(header.hash()), Err(e) => { return Err(ErrorKind::NotFound(format!( diff --git a/api/src/handlers/transactions_api.rs b/api/src/handlers/transactions_api.rs index 
4052acf6a0..968b447f0f 100644 --- a/api/src/handlers/transactions_api.rs +++ b/api/src/handlers/transactions_api.rs @@ -14,6 +14,7 @@ use super::utils::w; use crate::chain; +use crate::core::core::hash::Hashed; use crate::rest::*; use crate::router::{Handler, ResponseFuture}; use crate::types::*; @@ -133,19 +134,21 @@ impl TxHashSetHandler { let c = util::from_hex(id) .map_err(|e| ErrorKind::Argument(format!("Not a valid commitment {}, {}", id, e)))?; let commit = Commitment::from_vec(c); + let output_id = commit.hash(); let chain = w(&self.chain)?; - let output_pos = chain.get_output_pos(&commit).map_err(|e| { + let output_pos = chain.get_output_pos(&output_id).map_err(|e| { ErrorKind::NotFound(format!( "Unable to get a MMR position for commit {}, {}", id, e )) })?; - let merkle_proof = chain::Chain::get_merkle_proof_for_pos(&chain, commit).map_err(|e| { - ErrorKind::NotFound(format!( - "Unable to get a merkle proof for commit {}, {}", - id, e - )) - })?; + let merkle_proof = + chain::Chain::get_merkle_proof_for_pos(&chain, output_id).map_err(|e| { + ErrorKind::NotFound(format!( + "Unable to get a merkle proof for commit {}, {}", + id, e + )) + })?; Ok(OutputPrintable { output_type: OutputType::Coinbase, commit: Commitment::from_vec(vec![]), diff --git a/api/src/handlers/utils.rs b/api/src/handlers/utils.rs index cb308640ed..81356f8d42 100644 --- a/api/src/handlers/utils.rs +++ b/api/src/handlers/utils.rs @@ -14,6 +14,7 @@ use crate::chain; use crate::chain::types::CommitPos; +use crate::core::core::hash::Hashed; use crate::core::core::OutputIdentifier; use crate::rest::*; use crate::types::*; @@ -37,7 +38,7 @@ fn get_unspent( let c = util::from_hex(id) .map_err(|_| ErrorKind::Argument(format!("Not a valid commitment: {}", id)))?; let commit = Commitment::from_vec(c); - let res = chain.get_unspent(commit)?; + let res = chain.get_unspent(commit.hash())?; Ok(res) } diff --git a/api/src/types.rs b/api/src/types.rs index 533eedfedb..bb9d4e571e 100644 --- a/api/src/types.rs +++ b/api/src/types.rs @@ -294,7 +294,7 @@ impl OutputPrintable { OutputType::Transaction }; - let pos = chain.get_unspent(output.commitment())?; + let pos = chain.get_unspent(output.id())?; let spent = pos.is_none(); diff --git a/chain/src/chain.rs b/chain/src/chain.rs index 0b85c43f63..ded11ce6b1 100644 --- a/chain/src/chain.rs +++ b/chain/src/chain.rs @@ -178,6 +178,8 @@ impl Chain { // DB migrations to be run prior to the chain being used. // Migrate full blocks to protocol version v3. Chain::migrate_db_v2_v3(&store)?; + // Migrate output positions from commitment-based to ID-based. + Chain::migrate_db_outputs(&store)?; // open the txhashset, creating a new one if necessary let mut txhashset = txhashset::TxHashSet::open(db_root.clone(), store.clone(), None)?; @@ -711,9 +713,9 @@ impl Chain { /// Returns Err if something went wrong beyond not finding the output. 
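+	/// The lookup is keyed by output id rather than raw commitment (for
+	/// interactive outputs the id is simply the hash of the commitment).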
pub fn get_unspent( &self, - commit: Commitment, + output_id: Hash, ) -> Result, Error> { - self.txhashset.read().get_unspent(commit) + self.txhashset.read().get_unspent(output_id) } /// Retrieves an unspent output using its PMMR position @@ -920,9 +922,9 @@ impl Chain { /// Return a merkle proof valid for the current output pmmr state at the /// given pos - pub fn get_merkle_proof_for_pos(&self, commit: Commitment) -> Result { + pub fn get_merkle_proof_for_pos(&self, output_id: Hash) -> Result { let mut txhashset = self.txhashset.write(); - txhashset.merkle_proof(commit) + txhashset.merkle_proof(output_id) } /// Provides a reading view into the current txhashset state as well as @@ -1412,8 +1414,8 @@ impl Chain { } /// Return Commit's MMR position - pub fn get_output_pos(&self, commit: &Commitment) -> Result { - Ok(self.txhashset.read().get_output_pos(commit)?) + pub fn get_output_pos(&self, output_id: &Hash) -> Result { + Ok(self.txhashset.read().get_output_pos(&output_id)?) } /// outputs by insertion index @@ -1550,16 +1552,28 @@ impl Chain { Ok(()) } + /// Migrate our local db outputs from commitment-based to ID-based. + fn migrate_db_outputs(store: &ChainStore) -> Result<(), Error> { + let batch = store.batch()?; + let migrated_count = batch.migrate_output_positions()?; + debug!( + "migrate_db_outputs: migrated {} output position entries", + migrated_count + ); + batch.commit()?; + Ok(()) + } + /// Gets the block header in which a given output appears in the txhashset. - pub fn get_header_for_output(&self, commit: Commitment) -> Result { + pub fn get_header_for_output(&self, output_id: Hash) -> Result { let header_pmmr = self.header_pmmr.read(); let txhashset = self.txhashset.read(); - let (_, pos) = match txhashset.get_unspent(commit)? { + let (_, pos) = match txhashset.get_unspent(output_id)? { Some(o) => o, None => { return Err(ErrorKind::OutputNotFound(format!( - "Not found commit {}", - commit.to_hex() + "Not found output {}", + output_id.to_hex() )) .into()) } diff --git a/chain/src/store.rs b/chain/src/store.rs index a6d7f3ed21..8a2512a2de 100644 --- a/chain/src/store.rs +++ b/chain/src/store.rs @@ -36,7 +36,8 @@ const BLOCK_PREFIX: u8 = b'b'; const HEAD_PREFIX: u8 = b'H'; const TAIL_PREFIX: u8 = b'T'; const HEADER_HEAD_PREFIX: u8 = b'G'; -const OUTPUT_POS_PREFIX: u8 = b'p'; +const OUTPUT_ID_POS_PREFIX: u8 = b'o'; +const OUTPUT_COMMIT_POS_PREFIX: u8 = b'p'; // deprecated /// Prefix for NRD kernel pos index lists. pub const NRD_KERNEL_LIST_PREFIX: u8 = b'K'; @@ -125,19 +126,19 @@ impl ChainStore { } /// Get PMMR pos for the given output commitment. - pub fn get_output_pos(&self, commit: &Commitment) -> Result { - match self.get_output_pos_height(commit)? { + pub fn get_output_pos(&self, output_id: &Hash) -> Result { + match self.get_output_pos_height(output_id)? { Some(pos) => Ok(pos.pos), None => Err(Error::NotFoundErr(format!( "Output position for: {:?}", - commit + output_id ))), } } /// Get PMMR pos and block height for the given output commitment. - pub fn get_output_pos_height(&self, commit: &Commitment) -> Result, Error> { - self.db.get_ser(&to_key(OUTPUT_POS_PREFIX, commit)) + pub fn get_output_pos_height(&self, output_id: &Hash) -> Result, Error> { + self.db.get_ser(&to_key(OUTPUT_ID_POS_PREFIX, output_id)) } /// Builds a new batch to be used with this store. @@ -270,6 +271,26 @@ impl<'a> Batch<'a> { Ok(()) } + /// Migrate output positions from commitment-based to ID-based keys. 
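+	/// Walks the legacy commitment-keyed index (the deprecated 'p' prefix),
+	/// re-saves each position under the new id-keyed index, deletes the old
+	/// entry and returns the number of entries migrated.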
+ pub fn migrate_output_positions(&self) -> Result { + let start_key = to_key(OUTPUT_COMMIT_POS_PREFIX, ""); + + let mut migrated_count = 0; + let commit_pos_iter: SerIterator<(u64, u64)> = self.db.iter(&start_key)?; + for (key, (pos, height)) in commit_pos_iter { + // Recover commitment from key, which is in format 'p:commitment' + let commit = Commitment::from_vec(key[2..].to_vec()); + + // Save output position in new format + self.save_output_pos_height(&commit.hash(), CommitPos { pos, height })?; + + // Delete the old entry + self.db.delete(&key)?; + migrated_count += 1; + } + Ok(migrated_count) + } + /// Low level function to delete directly by raw key. pub fn delete(&self, key: &[u8]) -> Result<(), Error> { self.db.delete(key) @@ -321,14 +342,14 @@ impl<'a> Batch<'a> { } /// Save output_pos and block height to index. - pub fn save_output_pos_height(&self, commit: &Commitment, pos: CommitPos) -> Result<(), Error> { + pub fn save_output_pos_height(&self, output_id: &Hash, pos: CommitPos) -> Result<(), Error> { self.db - .put_ser(&to_key(OUTPUT_POS_PREFIX, commit)[..], &pos) + .put_ser(&to_key(OUTPUT_ID_POS_PREFIX, output_id)[..], &pos) } /// Delete the output_pos index entry for a spent output. - pub fn delete_output_pos_height(&self, commit: &Commitment) -> Result<(), Error> { - self.db.delete(&to_key(OUTPUT_POS_PREFIX, commit)) + pub fn delete_output_pos_height(&self, output_id: &Hash) -> Result<(), Error> { + self.db.delete(&to_key(OUTPUT_ID_POS_PREFIX, output_id)) } /// Delete the commitment for a spent output. @@ -356,31 +377,31 @@ impl<'a> Batch<'a> { /// When using the output_pos iterator we have access to the index keys but not the /// original commitment that the key is constructed from. So we need a way of comparing /// a key with another commitment without reconstructing the commitment from the key bytes. - pub fn is_match_output_pos_key(&self, key: &[u8], commit: &Commitment) -> bool { - let commit_key = to_key(OUTPUT_POS_PREFIX, commit); + pub fn is_match_output_pos_key(&self, key: &[u8], output_id: &Hash) -> bool { + let commit_key = to_key(OUTPUT_ID_POS_PREFIX, output_id); commit_key == key } /// Iterator over the output_pos index. pub fn output_pos_iter(&self) -> Result, Error> { - let key = to_key(OUTPUT_POS_PREFIX, ""); + let key = to_key(OUTPUT_ID_POS_PREFIX, ""); self.db.iter(&key) } /// Get output_pos from index. - pub fn get_output_pos(&self, commit: &Commitment) -> Result { - match self.get_output_pos_height(commit)? { + pub fn get_output_pos(&self, output_id: &Hash) -> Result { + match self.get_output_pos_height(output_id)? { Some(pos) => Ok(pos.pos), None => Err(Error::NotFoundErr(format!( "Output position for: {:?}", - commit + output_id ))), } } /// Get output_pos and block height from index. - pub fn get_output_pos_height(&self, commit: &Commitment) -> Result, Error> { - self.db.get_ser(&to_key(OUTPUT_POS_PREFIX, commit)) + pub fn get_output_pos_height(&self, output_id: &Hash) -> Result, Error> { + self.db.get_ser(&to_key(OUTPUT_ID_POS_PREFIX, output_id)) } /// Get the previous header. diff --git a/chain/src/txhashset/txhashset.rs b/chain/src/txhashset/txhashset.rs index 4c51caef0c..6f8efdd6e4 100644 --- a/chain/src/txhashset/txhashset.rs +++ b/chain/src/txhashset/txhashset.rs @@ -259,14 +259,14 @@ impl TxHashSet { /// Then we check the entry in the output MMR and confirm the hash matches. 
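+	/// Lookups into the position index are keyed by output id rather than by
+	/// the raw commitment.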
pub fn get_unspent( &self, - commit: Commitment, + output_id: Hash, ) -> Result, Error> { - match self.commit_index.get_output_pos_height(&commit) { + match self.commit_index.get_output_pos_height(&output_id) { Ok(Some(pos)) => { let output_pmmr: ReadonlyPMMR<'_, OutputIdentifier, _> = ReadonlyPMMR::at(&self.output_pmmr_h.backend, self.output_pmmr_h.last_pos); if let Some(out) = output_pmmr.get_data(pos.pos) { - if out.commitment() == commit { + if out.id() == output_id { Ok(Some((out, pos))) } else { Ok(None) @@ -378,17 +378,17 @@ impl TxHashSet { } /// Return Commit's MMR position - pub fn get_output_pos(&self, commit: &Commitment) -> Result { - Ok(self.commit_index.get_output_pos(&commit)?) + pub fn get_output_pos(&self, output_id: &Hash) -> Result { + Ok(self.commit_index.get_output_pos(&output_id)?) } /// build a new merkle proof for the given position. - pub fn merkle_proof(&mut self, commit: Commitment) -> Result { - let pos = self.commit_index.get_output_pos(&commit)?; + pub fn merkle_proof(&mut self, output_id: Hash) -> Result { + let pos = self.commit_index.get_output_pos(&output_id)?; PMMR::at(&mut self.output_pmmr_h.backend, self.output_pmmr_h.last_pos) .merkle_proof(pos) .map_err(|e| { - ErrorKind::MerkleProof(format!("Commit {:?}, pos {}, {}", commit, pos, e)).into() + ErrorKind::MerkleProof(format!("ID {:?}, pos {}, {}", output_id, pos, e)).into() }) } @@ -517,11 +517,10 @@ impl TxHashSet { let mut removed_count = 0; for (key, (pos, _)) in batch.output_pos_iter()? { if let Some(out) = output_pmmr.get_data(pos) { - if let Ok(pos_via_mmr) = batch.get_output_pos(&out.commitment()) { + if let Ok(pos_via_mmr) = batch.get_output_pos(&out.id()) { // If the pos matches and the index key matches the commitment // then keep the entry, other we want to clean it up. - if pos == pos_via_mmr && batch.is_match_output_pos_key(&key, &out.commitment()) - { + if pos == pos_via_mmr && batch.is_match_output_pos_key(&key, &out.id()) { continue; } } @@ -534,10 +533,10 @@ impl TxHashSet { removed_count ); - let mut outputs_pos: Vec<(Commitment, u64)> = vec![]; + let mut outputs_pos: Vec<(Hash, u64)> = vec![]; for pos in output_pmmr.leaf_pos_iter() { if let Some(out) = output_pmmr.get_data(pos) { - outputs_pos.push((out.commit, pos)); + outputs_pos.push((out.id(), pos)); } } @@ -567,13 +566,13 @@ impl TxHashSet { let hash = header_pmmr.get_header_hash_by_height(search_height + 1)?; let h = batch.get_block_header(&hash)?; while i < total_outputs { - let (commit, pos) = outputs_pos[i]; + let (output_id, pos) = outputs_pos[i]; if pos > h.output_mmr_size { // Note: MMR position is 1-based and not 0-based, so here must be '>' instead of '>=' break; } batch.save_output_pos_height( - &commit, + &output_id, CommitPos { pos, height: h.height, @@ -1110,7 +1109,7 @@ impl<'a> Extension<'a> { let pos = self.apply_output(out, batch)?; affected_pos.push(pos); batch.save_output_pos_height( - &out.commitment(), + &out.id(), CommitPos { pos, height: b.header.height, @@ -1129,7 +1128,7 @@ impl<'a> Extension<'a> { for (out, pos) in &spent { self.apply_input(out.commitment(), *pos)?; affected_pos.push(pos.pos); - batch.delete_output_pos_height(&out.commitment())?; + batch.delete_output_pos_height(&out.id())?; //save the spent commitments. 
let hh = HashHeight { hash: b.hash().clone(), @@ -1187,12 +1186,12 @@ impl<'a> Extension<'a> { } fn apply_output(&mut self, out: &Output, batch: &Batch<'_>) -> Result { - let commit = out.commitment(); + let output_id = out.id(); - if let Ok(pos) = batch.get_output_pos(&commit) { + if let Ok(pos) = batch.get_output_pos(&output_id) { if let Some(out_mmr) = self.output_pmmr.get_data(pos) { - if out_mmr.commitment() == commit { - return Err(ErrorKind::DuplicateCommitment(commit).into()); + if out_mmr.id() == output_id { + return Err(ErrorKind::DuplicateCommitment(out_mmr.commitment()).into()); } } } @@ -1267,7 +1266,7 @@ impl<'a> Extension<'a> { let out_id = out_id.as_ref(); debug!("txhashset: merkle_proof: output: {:?}", out_id.commit); // then calculate the Merkle Proof based on the known pos - let pos = batch.get_output_pos(&out_id.commit)?; + let pos = batch.get_output_pos(&out_id.id())?; let merkle_proof = self.output_pmmr.merkle_proof(pos).map_err(|e| { ErrorKind::TxHashSetErr(format!("pmmr get merkle proof at pos {}, {}", pos, e)) })?; @@ -1371,7 +1370,7 @@ impl<'a> Extension<'a> { // Remove any entries from the output_pos created by the block being rewound. let mut missing_count = 0; for out in block.outputs() { - if batch.delete_output_pos_height(&out.commitment()).is_err() { + if batch.delete_output_pos_height(&out.id()).is_err() { missing_count += 1; } } @@ -1402,7 +1401,7 @@ impl<'a> Extension<'a> { if let Ok(spent) = spent { for pos in spent { if let Some(out) = self.output_pmmr.get_data(pos.pos) { - batch.save_output_pos_height(&out.commitment(), pos)?; + batch.save_output_pos_height(&out.id(), pos)?; } } } diff --git a/chain/src/txhashset/utxo_view.rs b/chain/src/txhashset/utxo_view.rs index 14ce7e7b3d..7f0f83305e 100644 --- a/chain/src/txhashset/utxo_view.rs +++ b/chain/src/txhashset/utxo_view.rs @@ -122,10 +122,10 @@ impl<'a> UTXOView<'a> { input: Commitment, batch: &Batch<'_>, ) -> Result<(OutputIdentifier, CommitPos), Error> { - let pos = batch.get_output_pos_height(&input)?; + let pos = batch.get_output_pos_height(&input.hash())?; if let Some(pos) = pos { if let Some(out) = self.output_pmmr.get_data(pos.pos) { - if out.commitment() == input { + if out.id() == input.hash() { return Ok((out, pos)); } else { error!("input mismatch: {:?}, {:?}, {:?}", out, pos, input); @@ -141,7 +141,7 @@ impl<'a> UTXOView<'a> { // Output is valid if it would not result in a duplicate commitment in the output MMR. 
fn validate_output(&self, output: &Output, batch: &Batch<'_>) -> Result<(), Error> { - if let Ok(pos) = batch.get_output_pos(&output.commitment()) { + if let Ok(pos) = batch.get_output_pos(&output.id()) { if let Some(out_mmr) = self.output_pmmr.get_data(pos) { if out_mmr.commitment() == output.commitment() { return Err(ErrorKind::DuplicateCommitment(output.commitment()).into()); diff --git a/chain/tests/mine_simple_chain.rs b/chain/tests/mine_simple_chain.rs index 5516289915..2e3d1d8fae 100644 --- a/chain/tests/mine_simple_chain.rs +++ b/chain/tests/mine_simple_chain.rs @@ -683,14 +683,8 @@ fn spend_in_fork_and_compact() { let head = chain.head_header().unwrap(); assert_eq!(head.height, 5); assert_eq!(head.hash(), prev_main.hash()); - assert!(chain - .get_unspent(tx2.outputs()[0].commitment()) - .unwrap() - .is_some()); - assert!(chain - .get_unspent(tx1.outputs()[0].commitment()) - .unwrap() - .is_none()); + assert!(chain.get_unspent(tx2.outputs()[0].id()).unwrap().is_some()); + assert!(chain.get_unspent(tx1.outputs()[0].id()).unwrap().is_none()); // mine 2 forked blocks from the first let fork = prepare_block_tx(&kc, &fork_head, &chain, 6, &[tx1.clone()]); @@ -709,14 +703,8 @@ fn spend_in_fork_and_compact() { let head = chain.head_header().unwrap(); assert_eq!(head.height, 5); assert_eq!(head.hash(), prev_main.hash()); - assert!(chain - .get_unspent(tx2.outputs()[0].commitment()) - .unwrap() - .is_some()); - assert!(chain - .get_unspent(tx1.outputs()[0].commitment()) - .unwrap() - .is_none()); + assert!(chain.get_unspent(tx2.outputs()[0].id()).unwrap().is_some()); + assert!(chain.get_unspent(tx1.outputs()[0].id()).unwrap().is_none()); // make the fork win let fork_next = prepare_block(&kc, &prev_fork, &chain, 10); @@ -730,14 +718,8 @@ fn spend_in_fork_and_compact() { let head = chain.head_header().unwrap(); assert_eq!(head.height, 6); assert_eq!(head.hash(), prev_fork.hash()); - assert!(chain - .get_unspent(tx2.outputs()[0].commitment()) - .unwrap() - .is_some()); - assert!(chain - .get_unspent(tx1.outputs()[0].commitment()) - .unwrap() - .is_none()); + assert!(chain.get_unspent(tx2.outputs()[0].id()).unwrap().is_some()); + assert!(chain.get_unspent(tx1.outputs()[0].id()).unwrap().is_none()); // add 20 blocks to go past the test horizon let mut prev = prev_fork; @@ -809,7 +791,7 @@ fn output_header_mappings() { chain.process_block(b, chain::Options::MINE).unwrap(); let header_for_output = chain - .get_header_for_output(reward_outputs[n as usize - 1].commitment()) + .get_header_for_output(reward_outputs[n as usize - 1].id()) .unwrap(); assert_eq!(header_for_output.height, n as u64); @@ -819,7 +801,7 @@ fn output_header_mappings() { // Check all output positions are as expected for n in 1..15 { let header_for_output = chain - .get_header_for_output(reward_outputs[n - 1].commitment()) + .get_header_for_output(reward_outputs[n - 1].id()) .unwrap(); assert_eq!(header_for_output.height, n as u64); } diff --git a/core/src/core/transaction.rs b/core/src/core/transaction.rs index 379b3f1323..d41541fb3f 100644 --- a/core/src/core/transaction.rs +++ b/core/src/core/transaction.rs @@ -36,6 +36,8 @@ use util::static_secp_instance; use util::RwLock; use util::ToHex; +impl DefaultHashable for Commitment {} + /// Relative height field on NRD kernel variant. /// u16 representing a height between 1 and MAX (consensus::WEEK_HEIGHT). #[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)] @@ -612,11 +614,12 @@ impl TxKernel { self.excess } + /// Return the kernel signature. 
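+	/// For non-interactive kernels this unwraps the plain signature from the
+	/// 2-key "batch" proof; the stealth excess itself is not needed here.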
pub fn excess_sig(&self) -> secp::Signature { match self.proof { KernelProof::Interactive { excess_sig } => excess_sig, KernelProof::NonInteractive { - stealth_excess, + stealth_excess: _, signature, } => signature.get(), } @@ -675,6 +678,7 @@ impl TxKernel { Ok(()) } + /// Creates an interactive TxKernel. pub fn new_interactive( features: KernelFeatures, excess: Commitment, @@ -1607,39 +1611,15 @@ pub fn deaggregate(mk_tx: Transaction, txs: &[Transaction]) -> Result Self { - InputProof::Interactive - } -} - -impl Writeable for InputProof { - fn write(&self, writer: &mut W) -> Result<(), ser::Error> { - Ok(()) - } -} - -impl Readable for InputProof { - fn read(reader: &mut R) -> Result { - Ok(InputProof::Interactive) - } +pub struct InputProof { + /// The hash of the output being spent. + output_hash: Hash, + /// The one-time public key P of the output being spent. + output_pk: secp::PublicKey, + /// The "doubling" key D, chosen by the sender. + doubling_pk: secp::PublicKey, + /// The signature ψ proving knowledge of the discrete logarithm of P and D + sig: aggsig::BatchSignature, } /// A transaction input. @@ -1656,9 +1636,10 @@ pub struct Input { deserialize_with = "secp_ser::commitment_from_hex" )] pub commit: Commitment, - /// The input type: Interactive or non-interactive. - #[serde(default)] - pub proof: InputProof, + /// Proof of ownership of the referenced output. Only needed for non-interactive transctions. + /// For interactive transactions, there's no output public key P to prove knowledge of, + /// so a balanced MW transaction is the only proof needed. + pub proof: Option, } impl DefaultHashable for Input {} @@ -1675,7 +1656,7 @@ impl From<&OutputIdentifier> for Input { Input { features: out.features, commit: out.commit, - proof: InputProof::Interactive, + proof: None, } } } @@ -1686,7 +1667,6 @@ impl Writeable for Input { fn write(&self, writer: &mut W) -> Result<(), ser::Error> { self.features.write(writer)?; self.commit.write(writer)?; - self.proof.write(writer)?; Ok(()) } } @@ -1697,8 +1677,7 @@ impl Readable for Input { fn read(reader: &mut R) -> Result { let features = OutputFeatures::read(reader)?; let commit = Commitment::read(reader)?; - let proof = InputProof::read(reader)?; - Ok(Input::new(features, commit, proof)) + Ok(Input::new(features, commit, None)) } } @@ -1709,7 +1688,7 @@ impl Readable for Input { impl Input { /// Build a new input from the data required to identify and verify an /// output being spent. - pub fn new(features: OutputFeatures, commit: Commitment, proof: InputProof) -> Input { + pub fn new(features: OutputFeatures, commit: Commitment, proof: Option) -> Input { Input { features, commit, @@ -1854,7 +1833,7 @@ impl From<&[OutputIdentifier]> for Inputs { .map(|out| Input { features: out.features, commit: out.commit, - proof: InputProof::Interactive, + proof: None, }) .collect(); inputs.sort_unstable(); @@ -2091,6 +2070,11 @@ impl Output { self.identifier.commitment() } + /// Unique ID of the output + pub fn id(&self) -> Hash { + self.identifier.id() + } + /// Output features. pub fn features(&self) -> OutputFeatures { self.identifier.features @@ -2179,6 +2163,11 @@ impl OutputIdentifier { self.commit } + /// Unique ID of the output. + pub fn id(&self) -> Hash { + self.commit.hash() // TODO: Return output hash for non-interactive transactions + } + /// Is this a coinbase output? 
pub fn is_coinbase(&self) -> bool { self.features.is_coinbase() @@ -2470,7 +2459,7 @@ mod test { let input = Input { features: OutputFeatures::Plain, commit, - proof: InputProof::Interactive, + proof: None, }; let block_hash = @@ -2487,7 +2476,7 @@ mod test { let input = Input { features: OutputFeatures::Coinbase, commit, - proof: InputProof::Interactive, + proof: None, }; let short_id = input.short_id(&block_hash, nonce); diff --git a/core/src/libtx/aggsig.rs b/core/src/libtx/aggsig.rs index 73131c40e5..63150b6ef1 100644 --- a/core/src/libtx/aggsig.rs +++ b/core/src/libtx/aggsig.rs @@ -465,6 +465,7 @@ pub fn sign_with_blinding( pub struct BatchSignature(Signature); impl BatchSignature { + /// Get the underlying Signature object. pub fn get(&self) -> Signature { self.0 } diff --git a/core/src/libtx/build.rs b/core/src/libtx/build.rs index a63678f13e..b6052219fb 100644 --- a/core/src/libtx/build.rs +++ b/core/src/libtx/build.rs @@ -32,7 +32,7 @@ //! ) use crate::core::{ - Input, InputProof, KernelFeatures, KernelProof, Output, OutputFeatures, Transaction, TxKernel, + Input, KernelFeatures, KernelProof, Output, OutputFeatures, Transaction, TxKernel, }; use crate::libtx::proof::{self, ProofBuild}; use crate::libtx::{aggsig, Error}; @@ -73,7 +73,7 @@ where .keychain .commit(value, &key_id, SwitchCommitmentType::Regular)?; // TODO: proper support for different switch commitment schemes - let input = Input::new(features, commit, InputProof::Interactive); + let input = Input::new(features, commit, None); Ok(( tx.with_input(input), sum.sub_key_id(key_id.to_value_path(value)), From fdfa242facde1a21aa027e046d580ce7795c147d Mon Sep 17 00:00:00 2001 From: vertose Date: Fri, 27 May 2022 13:19:36 -0400 Subject: [PATCH 3/4] migrate spent commitments to ids and finish moving all input and output handling to id-based (sans APIs and a few tests) --- api/src/handlers/transactions_api.rs | 2 +- api/src/handlers/utils.rs | 2 +- chain/src/chain.rs | 11 ++-- chain/src/error.rs | 10 ++-- chain/src/pipe.rs | 18 +++---- chain/src/store.rs | 64 +++++++++++++----------- chain/src/txhashset/txhashset.rs | 10 ++-- chain/src/txhashset/utxo_view.rs | 20 ++++---- chain/tests/process_block_cut_through.rs | 3 +- core/src/core/transaction.rs | 14 +++++- pool/src/types.rs | 4 +- servers/src/mining/mine_block.rs | 14 +++--- 12 files changed, 94 insertions(+), 78 deletions(-) diff --git a/api/src/handlers/transactions_api.rs b/api/src/handlers/transactions_api.rs index 968b447f0f..2510acca06 100644 --- a/api/src/handlers/transactions_api.rs +++ b/api/src/handlers/transactions_api.rs @@ -133,7 +133,7 @@ impl TxHashSetHandler { fn get_merkle_proof_for_output(&self, id: &str) -> Result { let c = util::from_hex(id) .map_err(|e| ErrorKind::Argument(format!("Not a valid commitment {}, {}", id, e)))?; - let commit = Commitment::from_vec(c); + let commit = Commitment::from_vec(c); // todo: switch to output ID let output_id = commit.hash(); let chain = w(&self.chain)?; let output_pos = chain.get_output_pos(&output_id).map_err(|e| { diff --git a/api/src/handlers/utils.rs b/api/src/handlers/utils.rs index 81356f8d42..cf48d64dd2 100644 --- a/api/src/handlers/utils.rs +++ b/api/src/handlers/utils.rs @@ -37,7 +37,7 @@ fn get_unspent( ) -> Result, Error> { let c = util::from_hex(id) .map_err(|_| ErrorKind::Argument(format!("Not a valid commitment: {}", id)))?; - let commit = Commitment::from_vec(c); + let commit = Commitment::from_vec(c); // todo: switch to output ID let res = chain.get_unspent(commit.hash())?; Ok(res) } diff --git 
a/chain/src/chain.rs b/chain/src/chain.rs index ded11ce6b1..c3974cc29c 100644 --- a/chain/src/chain.rs +++ b/chain/src/chain.rs @@ -1552,13 +1552,18 @@ impl Chain { Ok(()) } - /// Migrate our local db outputs from commitment-based to ID-based. + /// Migrate our local db output position and spent entries from commitment-based to ID-based. fn migrate_db_outputs(store: &ChainStore) -> Result<(), Error> { let batch = store.batch()?; - let migrated_count = batch.migrate_output_positions()?; + let migrated_positions = batch.migrate_output_positions()?; debug!( "migrate_db_outputs: migrated {} output position entries", - migrated_count + migrated_positions + ); + let migrated_spent = batch.migrate_spent_commitments()?; + debug!( + "migrate_db_outputs: migrated {} spent commitment entries", + migrated_spent ); batch.commit()?; Ok(()) diff --git a/chain/src/error.rs b/chain/src/error.rs index fe0f9039e2..578582aafb 100644 --- a/chain/src/error.rs +++ b/chain/src/error.rs @@ -13,11 +13,11 @@ // limitations under the License. //! Error types for chain +use crate::core::core::hash::Hash; use crate::core::core::{block, committed, transaction}; use crate::core::ser; use crate::keychain; use crate::util::secp; -use crate::util::secp::pedersen::Commitment; use failure::{Backtrace, Context, Fail}; use grin_store as store; use std::fmt::{self, Display}; @@ -82,10 +82,10 @@ pub enum ErrorKind { Secp(secp::Error), /// One of the inputs in the block has already been spent #[fail(display = "Already Spent: {:?}", _0)] - AlreadySpent(Commitment), - /// An output with that commitment already exists (should be unique) - #[fail(display = "Duplicate Commitment: {:?}", _0)] - DuplicateCommitment(Commitment), + AlreadySpent(Hash), + /// An output with that ID already exists (should be unique) + #[fail(display = "Duplicate Output ID: {:?}", _0)] + DuplicateOutputId(Hash), /// Attempt to spend a coinbase output before it sufficiently matures. #[fail(display = "Attempt to spend immature coinbase")] ImmatureCoinbase, diff --git a/chain/src/pipe.rs b/chain/src/pipe.rs index 666fed7647..46a0ed3d92 100644 --- a/chain/src/pipe.rs +++ b/chain/src/pipe.rs @@ -87,25 +87,22 @@ pub fn check_against_spent_output( header_extension: &txhashset::HeaderExtension<'_>, batch: &store::Batch<'_>, ) -> Result<(), Error> { - let output_commits = tx.outputs.iter().map(|output| output.identifier.commit); + let output_ids = tx.outputs.iter().map(|output| output.id()); let tip = batch.head().unwrap(); let fork_height = fork_point_height.unwrap_or(tip.height); //convert the list of local branch bocks header hashes to a hash set for quick search let local_branch_blocks_list = local_branch_blocks.unwrap_or(Vec::new()); let local_branch_blocks_set = HashSet::<&Hash>::from_iter(local_branch_blocks_list.iter()); - for commit in output_commits { - let commit_hash = batch.get_spent_commitments(&commit)?; // check to see if this commitment is in the spent records in db - if let Some(c_hash) = commit_hash { + for id in output_ids { + let id_hash = batch.get_spent_ids(&id)?; // check to see if this output id is in the spent records in db + if let Some(c_hash) = id_hash { for hash_val in c_hash { //first check the local branch. if hash_val.height > fork_height && local_branch_blocks_set.contains(&hash_val.hash) { //first check the local branch. 
- error!( - "output contains spent commtiment:{:?} from local branch", - commit - ); + error!("output contains spent id:{:?} from local branch", id); return Err(ErrorKind::Other( "output invalid, could be a replay attack".to_string(), ) @@ -117,10 +114,7 @@ pub fn check_against_spent_output( warn!("the height data is messed up in the lmdb"); } if header_extension.is_on_current_chain(&header, batch).is_ok() { - error!( - "output contains spent commtiment:{:?} from the main chain", - commit - ); + error!("output contains spent id:{:?} from the main chain", id); return Err(ErrorKind::Other( "output invalid, could be a replay attack".to_string(), ) diff --git a/chain/src/store.rs b/chain/src/store.rs index 8a2512a2de..fdc1b7c4c4 100644 --- a/chain/src/store.rs +++ b/chain/src/store.rs @@ -47,7 +47,8 @@ pub const NRD_KERNEL_ENTRY_PREFIX: u8 = b'k'; const BLOCK_INPUT_BITMAP_PREFIX: u8 = b'B'; const BLOCK_SUMS_PREFIX: u8 = b'M'; const BLOCK_SPENT_PREFIX: u8 = b'S'; -const BLOCK_SPENT_COMMITMENT_PREFIX: u8 = b'C'; +const BLOCK_SPENT_COMMITMENT_PREFIX: u8 = b'C'; // deprecated +const BLOCK_SPENT_ID_PREFIX: u8 = b's'; /// All chain-related database operations pub struct ChainStore { @@ -228,12 +229,10 @@ impl<'a> Batch<'a> { Ok(()) } - /// We maintain a "spent" commitments for each full block to allow validation of input against spent output + /// We maintain a "spent" ids for each full block to allow validation of input against spent output /// for blocks within the horizon. These data will be deleted when chain is compact. - pub fn save_spent_commitments(&self, spent: &Commitment, hh: HashHeight) -> Result<(), Error> { - let hash_list = self - .db - .get_ser(&to_key(BLOCK_SPENT_COMMITMENT_PREFIX, spent))?; + pub fn save_spent_id(&self, spent: &Hash, hh: HashHeight) -> Result<(), Error> { + let hash_list = self.db.get_ser(&to_key(BLOCK_SPENT_ID_PREFIX, spent))?; let mut spent_list; if let Some(list) = hash_list { spent_list = list; @@ -242,27 +241,17 @@ impl<'a> Batch<'a> { } spent_list.push(hh); self.db.put_ser( - &to_key(BLOCK_SPENT_COMMITMENT_PREFIX, spent)[..], + &to_key(BLOCK_SPENT_ID_PREFIX, spent)[..], &spent_list.to_vec(), )?; Ok(()) } - /// get spent commitment - pub fn get_spent_commitments( - &self, - spent: &Commitment, - ) -> Result>, Error> { - self.db - .get_ser(&to_key(BLOCK_SPENT_COMMITMENT_PREFIX, spent)) + /// get spent id + pub fn get_spent_ids(&self, spent: &Hash) -> Result>, Error> { + self.db.get_ser(&to_key(BLOCK_SPENT_ID_PREFIX, spent)) } - // /// An iterator to all "spent" commit in db - // pub fn spent_commitment_iter(&self) -> Result>, Error> { - // let key = to_key(BLOCK_SPENT_COMMITMENT_PREFIX, ""); - // self.db.iter(&key) - // } - /// Migrate a block stored in the db by serializing it using the provided protocol version. /// Block may have been read using a previous protocol version but we do not actually care. pub fn migrate_block(&self, b: &Block, version: ProtocolVersion) -> Result<(), Error> { @@ -291,6 +280,26 @@ impl<'a> Batch<'a> { Ok(migrated_count) } + /// Migrate spent commitments to output IDs. 
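+	/// Walks every legacy `BLOCK_SPENT_COMMITMENT_PREFIX` entry, re-keys it under
+	/// `BLOCK_SPENT_ID_PREFIX` using the hash of the recovered commitment, deletes the
+	/// old entry, and returns the number of entries migrated.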
+ pub fn migrate_spent_commitments(&self) -> Result { + let start_key = to_key(BLOCK_SPENT_COMMITMENT_PREFIX, ""); + + let mut migrated_count = 0; + let commit_pos_iter: SerIterator = self.db.iter(&start_key)?; + for (key, hash_height) in commit_pos_iter { + // Recover commitment from key, which is in format 'S:commitment' + let commit = Commitment::from_vec(key[2..].to_vec()); + + // Save spent id in new format + self.save_spent_id(&commit.hash(), hash_height)?; + + // Delete the old entry + self.db.delete(&key)?; + migrated_count += 1; + } + Ok(migrated_count) + } + /// Low level function to delete directly by raw key. pub fn delete(&self, key: &[u8]) -> Result<(), Error> { self.db.delete(key) @@ -303,12 +312,12 @@ impl<'a> Batch<'a> { match inputs { Inputs::CommitOnly(inputs) => { for input in inputs { - let _ = self.delete_spent_commitments(&input.commitment(), bh); + let _ = self.delete_spent_id(&input.output_id(), bh); } } Inputs::FeaturesAndCommit(inputs) => { for input in inputs { - let _ = self.delete_spent_commitments(&input.commitment(), bh); + let _ = self.delete_spent_id(&input.output_id(), bh); } } } @@ -352,9 +361,9 @@ impl<'a> Batch<'a> { self.db.delete(&to_key(OUTPUT_ID_POS_PREFIX, output_id)) } - /// Delete the commitment for a spent output. - pub fn delete_spent_commitments(&self, spent: &Commitment, hash: &Hash) -> Result<(), Error> { - let hash_list = self.get_spent_commitments(spent)?; + /// Delete the id for a spent output. + pub fn delete_spent_id(&self, spent: &Hash, hash: &Hash) -> Result<(), Error> { + let hash_list = self.get_spent_ids(spent)?; let hash_list_unwrap = hash_list.unwrap_or(vec![]); let filtered_list: Vec<&HashHeight> = hash_list_unwrap .iter() @@ -363,12 +372,11 @@ impl<'a> Batch<'a> { if filtered_list.len() != 0 { self.db.put_ser( - &to_key(BLOCK_SPENT_COMMITMENT_PREFIX, spent)[..], + &to_key(BLOCK_SPENT_ID_PREFIX, spent)[..], &filtered_list.to_vec(), )?; } else { - self.db - .delete(&to_key(BLOCK_SPENT_COMMITMENT_PREFIX, spent))?; + self.db.delete(&to_key(BLOCK_SPENT_ID_PREFIX, spent))?; } Ok(()) diff --git a/chain/src/txhashset/txhashset.rs b/chain/src/txhashset/txhashset.rs index 6f8efdd6e4..cc4cdd1663 100644 --- a/chain/src/txhashset/txhashset.rs +++ b/chain/src/txhashset/txhashset.rs @@ -1126,7 +1126,7 @@ impl<'a> Extension<'a> { .utxo_view(header_ext) .validate_inputs(&b.inputs(), batch)?; for (out, pos) in &spent { - self.apply_input(out.commitment(), *pos)?; + self.apply_input(out.id(), *pos)?; affected_pos.push(pos.pos); batch.delete_output_pos_height(&out.id())?; //save the spent commitments. @@ -1134,7 +1134,7 @@ impl<'a> Extension<'a> { hash: b.hash().clone(), height: b.header.height.clone(), }; - batch.save_spent_commitments(&out.commitment().clone(), hh)?; + batch.save_spent_id(&out.id(), hh)?; } // Update the spent index with spent pos. @@ -1172,7 +1172,7 @@ impl<'a> Extension<'a> { // Prune output and rangeproof PMMRs based on provided pos. // Input is not valid if we cannot prune successfully. 
- fn apply_input(&mut self, commit: Commitment, pos: CommitPos) -> Result<(), Error> { + fn apply_input(&mut self, output_id: Hash, pos: CommitPos) -> Result<(), Error> { match self.output_pmmr.prune(pos.pos) { Ok(true) => { self.rproof_pmmr @@ -1180,7 +1180,7 @@ impl<'a> Extension<'a> { .map_err(|e| ErrorKind::TxHashSetErr(format!("pmmr prune error, {}", e)))?; Ok(()) } - Ok(false) => Err(ErrorKind::AlreadySpent(commit).into()), + Ok(false) => Err(ErrorKind::AlreadySpent(output_id).into()), Err(e) => Err(ErrorKind::TxHashSetErr(e).into()), } } @@ -1191,7 +1191,7 @@ impl<'a> Extension<'a> { if let Ok(pos) = batch.get_output_pos(&output_id) { if let Some(out_mmr) = self.output_pmmr.get_data(pos) { if out_mmr.id() == output_id { - return Err(ErrorKind::DuplicateCommitment(out_mmr.commitment()).into()); + return Err(ErrorKind::DuplicateOutputId(out_mmr.id()).into()); } } } diff --git a/chain/src/txhashset/utxo_view.rs b/chain/src/txhashset/utxo_view.rs index 7f0f83305e..00ae717e3d 100644 --- a/chain/src/txhashset/utxo_view.rs +++ b/chain/src/txhashset/utxo_view.rs @@ -21,7 +21,7 @@ use crate::core::global; use crate::error::{Error, ErrorKind}; use crate::store::Batch; use crate::types::CommitPos; -use crate::util::secp::pedersen::{Commitment, RangeProof}; +use crate::util::secp::pedersen::RangeProof; use grin_store::pmmr::PMMRBackend; /// Readonly view of the UTXO set (based on output MMR). @@ -86,7 +86,7 @@ impl<'a> UTXOView<'a> { let outputs_spent: Result, Error> = inputs .iter() .map(|input| { - self.validate_input(input.commitment(), batch) + self.validate_input(input.output_id(), batch) .and_then(|(out, pos)| Ok((out, pos))) }) .collect(); @@ -96,7 +96,7 @@ impl<'a> UTXOView<'a> { let outputs_spent: Result, Error> = inputs .iter() .map(|input| { - self.validate_input(input.commitment(), batch) + self.validate_input(input.output_id(), batch) .and_then(|(out, pos)| { // Unspent output found. // Check input matches full output identifier. @@ -116,16 +116,16 @@ impl<'a> UTXOView<'a> { // Input is valid if it is spending an (unspent) output // that currently exists in the output MMR. - // Note: We lookup by commitment. Caller must compare the full input as necessary. + // Note: We lookup by ID. Caller must compare the full input as necessary. fn validate_input( &self, - input: Commitment, + input: Hash, batch: &Batch<'_>, ) -> Result<(OutputIdentifier, CommitPos), Error> { - let pos = batch.get_output_pos_height(&input.hash())?; + let pos = batch.get_output_pos_height(&input)?; if let Some(pos) = pos { if let Some(out) = self.output_pmmr.get_data(pos.pos) { - if out.id() == input.hash() { + if out.id() == input { return Ok((out, pos)); } else { error!("input mismatch: {:?}, {:?}, {:?}", out, pos, input); @@ -143,8 +143,8 @@ impl<'a> UTXOView<'a> { fn validate_output(&self, output: &Output, batch: &Batch<'_>) -> Result<(), Error> { if let Ok(pos) = batch.get_output_pos(&output.id()) { if let Some(out_mmr) = self.output_pmmr.get_data(pos) { - if out_mmr.commitment() == output.commitment() { - return Err(ErrorKind::DuplicateCommitment(output.commitment()).into()); + if out_mmr.id() == output.id() { + return Err(ErrorKind::DuplicateOutputId(output.id()).into()); } } } @@ -175,7 +175,7 @@ impl<'a> UTXOView<'a> { // Lookup the outputs being spent. let spent: Result, _> = inputs .iter() - .map(|x| self.validate_input(x.commitment(), batch)) + .map(|x| self.validate_input(x.output_id(), batch)) .collect(); // Find the max pos of any coinbase being spent. 
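For illustration, a minimal sketch (not part of the diff) of how the re-keyed lookups above are consumed; it assumes a store `batch` and an `input` in scope, and for interactive outputs the ID is still just the hash of the commitment:

	// The same Hash now keys both the output position index and the spent-record index.
	let id: Hash = input.output_id(); // currently input.commitment().hash()
	// Unspent lookup: position of the output in the output MMR, if any.
	let _pos = batch.get_output_pos_height(&id)?;
	// Replay protection: blocks (within the horizon) in which this ID was already spent.
	if let Some(spends) = batch.get_spent_ids(&id)? {
		for hh in spends {
			// hh.hash / hh.height are what check_against_spent_output inspects.
		}
	}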
diff --git a/chain/tests/process_block_cut_through.rs b/chain/tests/process_block_cut_through.rs index ed794f3d16..b478a9b427 100644 --- a/chain/tests/process_block_cut_through.rs +++ b/chain/tests/process_block_cut_through.rs @@ -21,6 +21,7 @@ use grin_util as util; use self::chain_test_helper::{clean_output_dir, genesis_block, init_chain}; use crate::chain::{pipe, Chain, Options}; +use crate::core::core::hash::Hashed; use crate::core::core::verifier_cache::LruVerifierCache; use crate::core::core::{block, pmmr, transaction}; use crate::core::core::{Block, KernelFeatures, Transaction, Weighting}; @@ -148,7 +149,7 @@ fn process_block_cut_through() -> Result<(), chain::Error> { // Transaction will not validate against the chain (utxo). assert_eq!( chain.validate_tx(&tx).map_err(|e| e.kind()), - Err(chain::ErrorKind::DuplicateCommitment(commit)), + Err(chain::ErrorKind::DuplicateOutputId(commit.hash())), ); // Build a block with this single invalid transaction. diff --git a/core/src/core/transaction.rs b/core/src/core/transaction.rs index d41541fb3f..443b733230 100644 --- a/core/src/core/transaction.rs +++ b/core/src/core/transaction.rs @@ -1696,10 +1696,15 @@ impl Input { } } - /// The input commitment which _partially_ identifies the output being + /// The output ID which _partially_ identifies the output being /// spent. In the presence of a fork we need additional info to uniquely /// identify the output. Specifically the block hash (to correctly /// calculate lock_height for coinbase outputs). + pub fn output_id(&self) -> Hash { + self.commit.hash() // todo: return output hash for NITX outputs + } + + /// The commitment of the output being spent. pub fn commitment(&self) -> Commitment { self.commit } @@ -1801,6 +1806,11 @@ impl CommitWrapper { pub fn commitment(&self) -> Commitment { self.commit } + + /// Output identifier. + pub fn output_id(&self) -> Hash { + self.commit.hash() + } } /// Wrapper around a vec of inputs. #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] @@ -2137,7 +2147,7 @@ pub struct OutputIdentifier { serialize_with = "secp_ser::as_hex", deserialize_with = "secp_ser::commitment_from_hex" )] - pub commit: Commitment, + pub commit: Commitment, // todo: replace with output ID } impl DefaultHashable for OutputIdentifier {} diff --git a/pool/src/types.rs b/pool/src/types.rs index cbe8c410a5..133db43f9c 100644 --- a/pool/src/types.rs +++ b/pool/src/types.rs @@ -233,8 +233,8 @@ pub enum PoolError { #[fail(display = "Tx Pool Low fee transaction {}", _0)] LowFeeTransaction(u64), /// Attempt to add a duplicate output to the pool. - #[fail(display = "Tx Pool Duplicate commitment")] - DuplicateCommitment, + #[fail(display = "Tx Pool Duplicate output ID")] + DuplicateOutputId, /// Attempt to add a duplicate kernel or output duplicate to spent to the pool. #[fail(display = "Tx Pool Duplicate kernel or duplicate output to spent")] DuplicateKernelOrDuplicateSpent, diff --git a/servers/src/mining/mine_block.rs b/servers/src/mining/mine_block.rs index 963df13870..e39d440d69 100644 --- a/servers/src/mining/mine_block.rs +++ b/servers/src/mining/mine_block.rs @@ -87,10 +87,8 @@ pub fn get_block( let mut new_key_id = key_id.to_owned(); match e { self::Error::Chain(c) => match c.kind() { - chain::ErrorKind::DuplicateCommitment(_) => { - debug!( - "Duplicate commit for potential coinbase detected. Trying next derivation." - ); + chain::ErrorKind::DuplicateOutputId(_) => { + debug!("Duplicate output ID for potential coinbase detected. 
Trying next derivation."); // use the next available key to generate a different coinbase commitment new_key_id = None; } @@ -197,12 +195,12 @@ fn build_block( Ok(_) => Ok((b, block_fees)), Err(e) => { match e.kind() { - // If this is a duplicate commitment then likely trying to use + // If this is a duplicate output ID then likely trying to use // a key that hass already been derived but not in the wallet // for some reason, allow caller to retry. - chain::ErrorKind::DuplicateCommitment(e) => Err(Error::Chain( - chain::ErrorKind::DuplicateCommitment(e).into(), - )), + chain::ErrorKind::DuplicateOutputId(e) => { + Err(Error::Chain(chain::ErrorKind::DuplicateOutputId(e).into())) + } // Some other issue, possibly duplicate kernel _ => { From 0be720f69e10c4cfd284a12a87293b818116597b Mon Sep 17 00:00:00 2001 From: vertose Date: Fri, 24 Jun 2022 16:49:20 -0400 Subject: [PATCH 4/4] libtx builder for non-interactive txns --- core/src/core/block.rs | 2 + core/src/core/transaction.rs | 105 +++++++++++- core/src/global.rs | 2 +- core/src/libtx/mod.rs | 1 + core/src/libtx/nitx.rs | 303 +++++++++++++++++++++++++++++++++++ core/tests/block.rs | 6 +- core/tests/core.rs | 11 +- keychain/src/lib.rs | 1 + keychain/src/stealth.rs | 57 +++++++ 9 files changed, 473 insertions(+), 15 deletions(-) create mode 100644 core/src/libtx/nitx.rs create mode 100644 keychain/src/stealth.rs diff --git a/core/src/core/block.rs b/core/src/core/block.rs index 6cd2315477..0c3b8a2f59 100644 --- a/core/src/core/block.rs +++ b/core/src/core/block.rs @@ -789,6 +789,8 @@ impl Block { self.block_kernel_offset(prev_kernel_offset.clone())?, )?; + // todo: Verify stealth sums + Ok(kernel_sum) } diff --git a/core/src/core/transaction.rs b/core/src/core/transaction.rs index 443b733230..18847345e0 100644 --- a/core/src/core/transaction.rs +++ b/core/src/core/transaction.rs @@ -432,6 +432,9 @@ pub enum Error { /// Underlying serialization error. #[fail(display = "Tx Serialization error, {}", _0)] Serialization(ser::Error), + /// Error in AggSig library. + #[fail(display = "Tx Error, {}", _0)] + AggSigError(String), } impl From for Error { @@ -614,6 +617,23 @@ impl TxKernel { self.excess } + /// Return the public key the signature proves knowledge of. + pub fn excess_pubkey(&self) -> Result { + let pubkey = match self.proof { + KernelProof::Interactive { excess_sig: _ } => self.excess.to_pubkey()?, + KernelProof::NonInteractive { + stealth_excess, + signature: _, + } => aggsig::build_composite_pubkey( + &secp::Secp256k1::new(), + &self.excess.to_pubkey()?, + &stealth_excess.to_pubkey()?, + ) + .map_err(|e| Error::AggSigError(e.to_string()))?, + }; + Ok(pubkey) + } + /// Return the kernel signature. 
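+	/// For interactive kernels this is the plain Schnorr signature over the excess;
+	/// for non-interactive kernels it is the underlying batch signature covering both
+	/// the excess and the stealth excess.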
pub fn excess_sig(&self) -> secp::Signature { match self.proof { @@ -640,7 +660,7 @@ impl TxKernel { let secp = secp.lock(); let sig = &self.excess_sig(); // Verify aggsig directly in libsecp - let pubkey = &self.excess.to_pubkey()?; + let pubkey = &self.excess_pubkey()?; if !aggsig::verify_single( &secp, &sig, @@ -667,7 +687,7 @@ impl TxKernel { for tx_kernel in tx_kernels { sigs.push(tx_kernel.excess_sig()); - pubkeys.push(tx_kernel.excess.to_pubkey()?); + pubkeys.push(tx_kernel.excess_pubkey()?); msgs.push(tx_kernel.msg_to_sign()?); } @@ -686,7 +706,7 @@ impl TxKernel { ) -> TxKernel { TxKernel { features, - excess: excess, + excess, proof: KernelProof::Interactive { excess_sig }, } } @@ -1202,6 +1222,8 @@ pub struct Transaction { deserialize_with = "secp_ser::blind_from_hex" )] pub offset: BlindingFactor, + /// The stealth offset + pub stealth_offset: Option, /// The transaction body - inputs/outputs/kernels pub body: TransactionBody, } @@ -1211,7 +1233,7 @@ impl DefaultHashable for Transaction {} /// PartialEq impl PartialEq for Transaction { fn eq(&self, tx: &Transaction) -> bool { - self.body == tx.body && self.offset == tx.offset + self.body == tx.body && self.offset == tx.offset && self.stealth_offset == tx.stealth_offset } } @@ -1220,6 +1242,14 @@ impl PartialEq for Transaction { impl Writeable for Transaction { fn write(&self, writer: &mut W) -> Result<(), ser::Error> { self.offset.write(writer)?; + if writer.protocol_version().value() >= 4 { + if let Some(stealth_offset) = self.stealth_offset.as_ref() { + writer.write_u8(1)?; + stealth_offset.write(writer)?; + } else { + writer.write_u8(0)?; + } + } self.body.write(writer)?; Ok(()) } @@ -1230,8 +1260,16 @@ impl Writeable for Transaction { impl Readable for Transaction { fn read(reader: &mut R) -> Result { let offset = BlindingFactor::read(reader)?; + let mut stealth_offset = None; + if reader.protocol_version().value() >= 4 && reader.read_u8()? == 1 { + stealth_offset = Some(BlindingFactor::read(reader)?); + }; let body = TransactionBody::read(reader)?; - let tx = Transaction { offset, body }; + let tx = Transaction { + offset, + stealth_offset, + body, + }; // Now "lightweight" validation of the tx. // Treat any validation issues as data corruption. @@ -1269,6 +1307,7 @@ impl Transaction { pub fn empty() -> Transaction { Transaction { offset: BlindingFactor::zero(), + stealth_offset: None, body: Default::default(), } } @@ -1282,6 +1321,7 @@ impl Transaction { Transaction { offset: BlindingFactor::zero(), + stealth_offset: None, body, } } @@ -1292,6 +1332,15 @@ impl Transaction { Transaction { offset, ..self } } + /// Creates a new transaction using this transaction as a template + /// and with the specified stealth offset. + pub fn with_stealth_offset(self, stealth_offset: BlindingFactor) -> Transaction { + Transaction { + stealth_offset: Some(stealth_offset), + ..self + } + } + /// Builds a new transaction with the provided inputs added. Existing /// inputs, if any, are kept intact. /// Sort order is maintained. @@ -1609,11 +1658,11 @@ pub fn deaggregate(mk_tx: Transaction, txs: &[Transaction]) -> Result InputProof { + InputProof { + output_id, + output_pk, + doubling_pk, + sig, + } + } +} + /// A transaction input. /// /// Primarily a reference to an output being spent by the transaction. 
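For illustration, a minimal sketch (not part of the diff) of how a spender could assemble an `InputProof`, mirroring what `libtx::nitx::input()` does later in this patch; the `secp` context and the `output_id`, `spend_key`, `doubling_key`, `features` and `commit` bindings are assumed:

	let msg = Message::from_slice(output_id.to_vec().as_slice())?;
	// Batch signature proving knowledge of both the one-time output key P and the doubling key D.
	let sig = aggsig::sign_dual_key(secp, &msg, &spend_key, &doubling_key)?;
	let proof = InputProof::new(
		output_id,
		PublicKey::from_secret_key(secp, &spend_key)?,    // P
		PublicKey::from_secret_key(secp, &doubling_key)?, // D
		sig,
	);
	let input = Input::new(features, commit, Some(proof));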
@@ -1884,10 +1950,13 @@ impl Writeable for Inputs { }, Inputs::FeaturesAndCommit(inputs) => match writer.protocol_version().value() { 0..=2 => inputs.write(writer)?, - 3..=ProtocolVersion::MAX => { + 3 => { let inputs: Vec = self.into(); inputs.write(writer)?; } + 4..=ProtocolVersion::MAX => { + inputs.write(writer)?; + } }, } } @@ -2070,6 +2139,26 @@ impl Output { } } + /// Create a new output for a non-interactive transaction with the provided attributes. + pub fn new_nitx( + features: OutputFeatures, + commit: Commitment, + proof: RangeProof, + ephemeral_pk: secp::PublicKey, + output_pk: secp::PublicKey, + sig: secp::Signature, + ) -> Output { + Output { + identifier: OutputIdentifier { features, commit }, + proof, + keys: Some(OutputKeys { + ephemeral_pk, + output_pk, + sig, + }), + } + } + /// Output identifier. pub fn identifier(&self) -> OutputIdentifier { self.identifier diff --git a/core/src/global.rs b/core/src/global.rs index c1eb793fb4..85075d77da 100644 --- a/core/src/global.rs +++ b/core/src/global.rs @@ -41,7 +41,7 @@ use util::OneTime; /// Note: We also use a specific (possible different) protocol version /// for both the backend database and MMR data files. /// NOTE, grin bump the protocol version to 1000, but in any case fo far 1,2,3 are supported. -pub const PROTOCOL_VERSION: ProtocolVersion = ProtocolVersion(3); +pub const PROTOCOL_VERSION: ProtocolVersion = ProtocolVersion(4); /// Automated testing edge_bits pub const AUTOMATED_TESTING_MIN_EDGE_BITS: u8 = 10; diff --git a/core/src/libtx/mod.rs b/core/src/libtx/mod.rs index b2e2e41df8..86fa98e10d 100644 --- a/core/src/libtx/mod.rs +++ b/core/src/libtx/mod.rs @@ -24,6 +24,7 @@ pub mod aggsig; pub mod build; mod error; +pub mod nitx; pub mod proof; pub mod reward; pub mod secp_ser; diff --git a/core/src/libtx/nitx.rs b/core/src/libtx/nitx.rs new file mode 100644 index 0000000000..954c3fe547 --- /dev/null +++ b/core/src/libtx/nitx.rs @@ -0,0 +1,303 @@ +//! Utility functions to build non-interactive transactions. Handles the blinding of +//! inputs and outputs, maintaining the sum of blinding factors, producing +//! the excess signature, etc. +//! +//! Example: +//! nitx::transaction( +//! KernelFeatures::Plain{ fee: 2 }, +//! vec![ +//! input_rand(75), +//! output_rand(42), +//! output_rand(32), +//! ], +//! keychain, +//! builder +//! ) + +use crate::core::hash::{Hash, Hashed}; +use crate::core::{ + Input, InputProof, KernelFeatures, KernelProof, Output, OutputFeatures, Transaction, TxKernel, +}; +use crate::libtx::proof::ProofBuild; +use crate::libtx::{aggsig, Error}; +use keychain::{stealth, BlindSum, BlindingFactor, Identifier, Keychain, SwitchCommitmentType}; +use stealth::StealthAddress; +use util::secp; +use util::secp::{Message, PublicKey, SecretKey}; + +/// Context information available to transaction combinators. +pub struct Context<'a, K, B> +where + K: Keychain, + B: ProofBuild, +{ + /// The keychain used for key derivation + pub keychain: &'a K, + /// The bulletproof builder + pub builder: &'a B, +} + +/// Function type returned by the transaction combinators. Transforms a +/// (Transaction, BlindSum, BlindSum) tuple into another, given the provided context. +/// Will return an Err if something went wrong at any point during transaction building. 
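+/// The second `BlindSum` accumulates the stealth blinding factors (the ephemeral keys
+/// added by outputs and the doubling keys subtracted by inputs) alongside the usual
+/// excess sum.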
+type Append = dyn for<'a> Fn( + &'a mut Context<'_, K, B>, + Result<(Transaction, BlindSum, BlindSum), Error>, +) -> Result<(Transaction, BlindSum, BlindSum), Error>; + +/// Adds an input with the provided value, excess key, and doubling key to the transaction +/// being built. +pub fn input( + output_id: Hash, + value: u64, + features: OutputFeatures, + excess_key_id: Identifier, + doubling_key_id: Identifier, + spend_key: Option, +) -> Box> +where + K: Keychain, + B: ProofBuild, +{ + Box::new( + move |build, acc| -> Result<(Transaction, BlindSum, BlindSum), Error> { + if let Ok((tx, sum_excess, sum_stealth)) = acc { + let doubling_key = build.keychain.derive_key( + value, + &doubling_key_id, + SwitchCommitmentType::None, + )?; + let commit = + build + .keychain + .commit(value, &excess_key_id, SwitchCommitmentType::Regular)?; + let input_proof = if let Some(spend_key) = spend_key.as_ref() { + let sig = aggsig::sign_dual_key( + build.keychain.secp(), + &Message::from_slice(output_id.to_vec().as_slice())?, + &spend_key, + &doubling_key, + )?; + Some(InputProof::new( + output_id.clone(), + PublicKey::from_secret_key(build.keychain.secp(), &spend_key)?, + PublicKey::from_secret_key(build.keychain.secp(), &doubling_key)?, + sig, + )) + } else { + None + }; + + let input = Input::new(features, commit, input_proof); + Ok(( + tx.with_input(input), + sum_excess.sub_key_id(excess_key_id.to_value_path(value)), + sum_stealth.sub_blinding_factor(BlindingFactor::from_secret_key(doubling_key)), + )) + } else { + acc + } + }, + ) +} + +/// Adds an output for the provided receiver with the value and blinding key to the transaction +/// being built. +#[allow(non_snake_case)] +pub fn output( + value: u64, + key_id: Identifier, + receiver_addr: StealthAddress, +) -> Box> +where + K: Keychain, + B: ProofBuild, +{ + Box::new( + move |build, acc| -> Result<(Transaction, BlindSum, BlindSum), Error> { + let (tx, sum, stealth_sum) = acc?; + + let secp = build.keychain.secp(); + + let r = build + .keychain + .derive_key(value, &key_id, SwitchCommitmentType::None)?; + let ephemeral_pk = PublicKey::from_secret_key(secp, &r)?; + + // Calculate shared secrets (k,q) + let (k, q) = receiver_addr.calc_shared_secrets(secp, &r)?; + + let blind = secp.blind_switch(value, q)?; + let commit = secp.commit(value, blind.clone())?; + + debug!("Building output: {}, {:?}", value, commit); + + // Build rangeproof + let rewind_nonce = build.builder.rewind_nonce(secp, &commit)?; + let private_nonce = build.builder.private_nonce(secp, &commit)?; + let message = + build + .builder + .proof_message(secp, &key_id, SwitchCommitmentType::Regular)?; + let proof = secp.bullet_proof( + value, + blind.clone(), + rewind_nonce, + private_nonce, + None, + Some(message), + ); + + let mut output_pk = receiver_addr.B; + output_pk.add_exp_assign(secp, &k)?; + + // Sign (commit||proof||output_pk) + let msg_hash = (commit, proof, output_pk.serialize_vec(true).to_vec()).hash(); + let msg = secp::Message::from_slice(&msg_hash.as_bytes())?; + let sig = aggsig::sign_single(secp, &msg, &r, None, None)?; + + Ok(( + tx.with_output(Output::new_nitx( + OutputFeatures::Plain, + commit, + proof, + ephemeral_pk, + output_pk, + sig, + )), + sum.add_blinding_factor(BlindingFactor::from_secret_key(blind)), + stealth_sum.add_blinding_factor(BlindingFactor::from_secret_key(r)), + )) + }, + ) +} + +/// Builds a complete non-interactive transaction. 
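+/// Folds the provided combinators over an empty transaction, draws random kernel and
+/// stealth offsets, splits the summed excesses by those offsets, and finishes the kernel
+/// with a dual-key batch signature over the excess and stealth excess.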
+pub fn transaction( + features: KernelFeatures, + elems: &[Box>], + keychain: &K, + builder: &B, +) -> Result +where + K: Keychain, + B: ProofBuild, +{ + let mut kernel = TxKernel::with_features(features); + + // Construct the message to be signed. + let msg = kernel.msg_to_sign()?; + + let mut ctx = Context { keychain, builder }; + let (tx, sum_excess, sum_stealth) = elems.iter().fold( + Ok((Transaction::empty(), BlindSum::new(), BlindSum::new())), + |acc, elem| elem(&mut ctx, acc), + )?; + + let offset = BlindingFactor::rand(); + let stealth_offset = BlindingFactor::rand(); + + // Generate kernel public excess and associated signature. + let total_excess = keychain.blind_sum(&sum_excess)?; + let kern_excess = total_excess.split(&offset)?; + let key = kern_excess.secret_key()?; + kernel.excess = keychain.secp().commit(0, key.clone())?; + + let total_stealth_excess = keychain.blind_sum(&sum_stealth)?; + let kern_stealth_excess = total_stealth_excess.split(&stealth_offset)?; + let stealth_key = kern_stealth_excess.secret_key()?; + + kernel.proof = KernelProof::NonInteractive { + stealth_excess: keychain.secp().commit(0, stealth_key.clone())?, + signature: aggsig::sign_dual_key(&keychain.secp(), &msg, &key, &stealth_key)?, + }; + kernel.verify()?; + + // Update tx with new kernel and offset. + let mut tx = tx.replace_kernel(kernel); + tx.offset = offset; + tx.stealth_offset = Some(stealth_offset); + Ok(tx) +} + +// Just a simple test, most exhaustive tests in the core. +#[cfg(test)] +mod test { + use rand::distributions::Alphanumeric; + use rand::{thread_rng, Rng}; + use std::sync::Arc; + use util::RwLock; + + use super::*; + use crate::core::hash::Hash; + use crate::core::transaction::Weighting; + use crate::core::verifier_cache::{LruVerifierCache, VerifierCache}; + use crate::global; + use crate::libtx::ProofBuilder; + use keychain::{ExtKeychain, ExtKeychainPath}; + + fn verifier_cache() -> Arc> { + Arc::new(RwLock::new(LruVerifierCache::new())) + } + + fn rand_hash() -> Hash { + let rnd: String = thread_rng().sample_iter(&Alphanumeric).take(32).collect(); + Hash::from_vec(rnd.as_bytes()) + } + + fn rand_sk() -> SecretKey { + SecretKey::from_slice(rand_hash().as_bytes()).unwrap() + } + + fn rand_pk() -> PublicKey { + PublicKey::from_secret_key(&secp::Secp256k1::new(), &rand_sk()).unwrap() + } + + #[test] + fn build_simple_nitx() { + global::set_local_chain_type(global::ChainTypes::AutomatedTesting); + let keychain = ExtKeychain::from_random_seed(false).unwrap(); + let builder = ProofBuilder::new(&keychain); + let key_id1 = ExtKeychainPath::new(1, 1, 0, 0, 0).to_identifier(); + let key_id2 = ExtKeychainPath::new(1, 2, 0, 0, 0).to_identifier(); + let key_id3 = ExtKeychainPath::new(1, 3, 0, 0, 0).to_identifier(); + let key_id4 = ExtKeychainPath::new(1, 4, 0, 0, 0).to_identifier(); + let key_id5 = ExtKeychainPath::new(1, 5, 0, 0, 0).to_identifier(); + + let vc = verifier_cache(); + + let input1 = input( + rand_hash(), + 10, + OutputFeatures::Plain, + key_id1, + key_id2, + Some(rand_sk()), + ); + + let input2 = input( + rand_hash(), + 12, + OutputFeatures::Plain, + key_id3, + key_id4, + Some(rand_sk()), + ); + + let receiver_addr1 = StealthAddress { + A: rand_pk(), + B: rand_pk(), + }; + let output1 = output(20, key_id5, receiver_addr1); + + let tx = transaction( + KernelFeatures::Plain { fee: 2 }, + &[input1, input2, output1], + &keychain, + &builder, + ) + .unwrap(); + + tx.validate(Weighting::AsTransaction, vc.clone()).unwrap(); + } +} diff --git a/core/tests/block.rs b/core/tests/block.rs 
index 9f4292dbd5..d6ff832145 100644 --- a/core/tests/block.rs +++ b/core/tests/block.rs @@ -534,7 +534,7 @@ fn block_single_tx_serialized_size() { let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0); let b = new_block(&[tx1], &keychain, &builder, &prev, &key_id); - // Default protocol version (3) + // Default protocol version (4) let mut vec = Vec::new(); ser::serialize_default(&mut vec, &b).expect("serialization failed"); assert_eq!(vec.len(), 2_669); @@ -576,10 +576,10 @@ fn block_single_tx_serialized_size() { ser::serialize(&mut vec, ser::ProtocolVersion(3), &b).expect("serialization failed"); assert_eq!(vec.len(), 2_669); - // Default protocol version (3) for completeness + // Default protocol version (4) for completeness let mut vec = Vec::new(); ser::serialize_default(&mut vec, &b).expect("serialization failed"); - assert_eq!(vec.len(), 2_669); + assert_eq!(vec.len(), 2_670); } #[test] diff --git a/core/tests/core.rs b/core/tests/core.rs index 3546b354da..fae4cea99e 100644 --- a/core/tests/core.rs +++ b/core/tests/core.rs @@ -43,10 +43,10 @@ fn test_setup() { fn simple_tx_ser() { let tx = tx2i1o(); - // Default protocol version (3). + // Default protocol version (4). let mut vec = Vec::new(); ser::serialize_default(&mut vec, &tx).expect("serialization failed"); - assert_eq!(vec.len(), 945); + assert_eq!(vec.len(), 946); // Explicit protocol version 3. let mut vec = Vec::new(); @@ -84,10 +84,15 @@ fn simple_tx_ser() { ser::serialize(&mut vec, ser::ProtocolVersion(3), &tx).expect("serialization failed"); assert_eq!(vec.len(), 945); + // Explicit protocol version 4. + let mut vec = Vec::new(); + ser::serialize(&mut vec, ser::ProtocolVersion(4), &tx).expect("serialization failed"); + assert_eq!(vec.len(), 948); + // And default protocol version for completeness. 
let mut vec = Vec::new(); ser::serialize_default(&mut vec, &tx).expect("serialization failed"); - assert_eq!(vec.len(), 945); + assert_eq!(vec.len(), 948); } #[test] diff --git a/keychain/src/lib.rs b/keychain/src/lib.rs index 3d61054d02..a1678299b8 100644 --- a/keychain/src/lib.rs +++ b/keychain/src/lib.rs @@ -28,6 +28,7 @@ extern crate lazy_static; pub mod base58; pub mod extkey_bip32; pub mod mnemonic; +pub mod stealth; mod types; pub mod view_key; diff --git a/keychain/src/stealth.rs b/keychain/src/stealth.rs new file mode 100644 index 0000000000..dd1b698412 --- /dev/null +++ b/keychain/src/stealth.rs @@ -0,0 +1,57 @@ +#![allow(non_snake_case)] + +use crate::blake2::blake2b::Blake2b; +use crate::types::Error; +use crate::util::secp::key::{PublicKey, SecretKey}; +use crate::util::secp::Secp256k1; + +/// A stealth address containing the pair of public keys (A=aG, B=bG) +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +pub struct StealthAddress { + pub A: PublicKey, + pub B: PublicKey, +} + +impl StealthAddress { + pub fn calc_shared_secrets( + &self, + secp: &Secp256k1, + r: &SecretKey, + ) -> Result<(SecretKey, SecretKey), Error> { + let mut rA = self.A; + rA.mul_assign(secp, &r)?; + + let mut hasher = Blake2b::new(32); + hasher.update(&b"k"[..]); + hasher.update(&rA.serialize_vec(true)); + hasher.update(&self.A.serialize_vec(true)); + hasher.update(&self.B.serialize_vec(true)); + let k = hasher.finalize(); + + let mut hasher = Blake2b::new(32); + hasher.update(&b"q"[..]); + hasher.update(&rA.serialize_vec(true)); + hasher.update(&self.A.serialize_vec(true)); + hasher.update(&self.B.serialize_vec(true)); + let q = hasher.finalize(); + + Ok(( + SecretKey::from_slice(k.as_bytes())?, + SecretKey::from_slice(q.as_bytes())?, + )) + } +} + +/// A view key containing the secret key a and public key B=bG +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct ViewKey { + pub a: SecretKey, + pub B: PublicKey, +} + +/// A spend key containing the pair of secret keys (a,b) +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct SpendKey { + pub a: SecretKey, + pub b: SecretKey, +}
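For orientation, a minimal sketch (not part of the patch series) of the receiver-side counterpart to `calc_shared_secrets`, assuming the output carries the sender's ephemeral public key R = rG as in `nitx::output()`. The helper name is hypothetical; it simply recomputes rA as a*R with the view key and reuses the same Blake2b derivation, as if it lived alongside the types above (the module already allows non_snake_case):

impl ViewKey {
	/// Recover the shared secrets (k, q) for an output addressed to us,
	/// using a*R == r*A. Mirrors StealthAddress::calc_shared_secrets.
	pub fn recover_shared_secrets(
		&self,
		secp: &Secp256k1,
		ephemeral_pk: &PublicKey,
	) -> Result<(SecretKey, SecretKey), Error> {
		let A = PublicKey::from_secret_key(secp, &self.a)?;
		let mut rA = *ephemeral_pk;
		rA.mul_assign(secp, &self.a)?;

		let mut hasher = Blake2b::new(32);
		hasher.update(&b"k"[..]);
		hasher.update(&rA.serialize_vec(true));
		hasher.update(&A.serialize_vec(true));
		hasher.update(&self.B.serialize_vec(true));
		let k = hasher.finalize();

		let mut hasher = Blake2b::new(32);
		hasher.update(&b"q"[..]);
		hasher.update(&rA.serialize_vec(true));
		hasher.update(&A.serialize_vec(true));
		hasher.update(&self.B.serialize_vec(true));
		let q = hasher.finalize();

		Ok((
			SecretKey::from_slice(k.as_bytes())?,
			SecretKey::from_slice(q.as_bytes())?,
		))
	}
}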