Commit 0443532

Merge branch 'main' into dependabot/cargo/tagged-base64-0.3.1

mrain authored Jun 8, 2023
2 parents 6090108 + 2664b54 commit 0443532

Showing 12 changed files with 596 additions and 113 deletions.
24 changes: 24 additions & 0 deletions .github/workflows/combine-prs.yml
@@ -0,0 +1,24 @@
+name: Combine PRs
+
+on:
+  schedule:
+    - cron: "0 1 * * MON"
+  workflow_dispatch: # allows manually triggering the workflow
+
+# The minimum permissions required to run this Action
+permissions:
+  contents: write
+  pull-requests: write
+  checks: read
+
+jobs:
+  combine-prs:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: combine-prs
+        id: combine-prs
+        uses: github/[email protected]
+        with:
+          github_token: ${{ secrets.ORG_GITHUB_PAT }}
+          labels: "dependabot,combined-pr"
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -33,6 +33,7 @@ and follow [semantic versioning](https://semver.org/) for our releases.
 - [#238](https://github.com/EspressoSystems/jellyfish/pull/238) add public keys into signature aggregation APIs
 - [#251](https://github.com/EspressoSystems/jellyfish/pull/251) add sign_key_ref api for BLSKeyPair
 - [#297](https://github.com/EspressoSystems/jellyfish/pull/297) Updated `tagged-base64` dependency to the `crates.io` package
+- [#299](https://github.com/EspressoSystems/jellyfish/pull/299) For Merkle tree, `DigestAlgorithm` now returns a `Result` type.

 ### Removed

147 changes: 145 additions & 2 deletions primitives/src/aead.rs
@@ -11,6 +11,7 @@
 //! independent of RustCrypto's upstream changes.
 use crate::errors::PrimitivesError;
+use ark_serialize::*;
 use ark_std::{
     fmt, format,
     ops::{Deref, DerefMut},
@@ -124,7 +125,9 @@ impl fmt::Debug for DecKey {
 }

 /// Keypair for Authenticated Encryption with Associated Data
-#[derive(Clone, Debug, Default, Serialize, Deserialize)]
+#[derive(
+    Clone, Debug, Default, Serialize, Deserialize, CanonicalSerialize, CanonicalDeserialize,
+)]
 pub struct KeyPair {
     enc_key: EncKey,
     dec_key: DecKey,
@@ -240,13 +243,127 @@ impl DerefMut for Nonce {
 }

 /// The ciphertext produced by AEAD encryption
-#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
+#[derive(
+    Clone,
+    Debug,
+    PartialEq,
+    Eq,
+    Hash,
+    Serialize,
+    Deserialize,
+    CanonicalSerialize,
+    CanonicalDeserialize,
+)]
 pub struct Ciphertext {
     nonce: Nonce,
     ct: Vec<u8>,
     ephemeral_pk: EncKey,
 }

+// TODO: (alex) Temporarily add CanonicalSerde back to these structs due to the
+// limitations of `tagged` proc macro and requests from downstream usage.
+// Tracking issue: <https://github.com/EspressoSystems/jellyfish/issues/288>
+mod canonical_serde {
+    use super::*;
+
+    impl CanonicalSerialize for EncKey {
+        fn serialize_with_mode<W: Write>(
+            &self,
+            mut writer: W,
+            _compress: Compress,
+        ) -> Result<(), SerializationError> {
+            let bytes: [u8; crypto_kx::PublicKey::BYTES] = self.clone().into();
+            writer.write_all(&bytes)?;
+            Ok(())
+        }
+        fn serialized_size(&self, _compress: Compress) -> usize {
+            crypto_kx::PublicKey::BYTES
+        }
+    }
+
+    impl CanonicalDeserialize for EncKey {
+        fn deserialize_with_mode<R: Read>(
+            mut reader: R,
+            _compress: Compress,
+            _validate: Validate,
+        ) -> Result<Self, SerializationError> {
+            let mut result = [0u8; crypto_kx::PublicKey::BYTES];
+            reader.read_exact(&mut result)?;
+            Ok(EncKey(crypto_kx::PublicKey::from(result)))
+        }
+    }
+
+    impl Valid for EncKey {
+        fn check(&self) -> Result<(), SerializationError> {
+            Ok(())
+        }
+    }
+
+    impl CanonicalSerialize for DecKey {
+        fn serialize_with_mode<W: Write>(
+            &self,
+            mut writer: W,
+            _compress: Compress,
+        ) -> Result<(), SerializationError> {
+            let bytes: [u8; crypto_kx::SecretKey::BYTES] = self.clone().into();
+            writer.write_all(&bytes)?;
+            Ok(())
+        }
+        fn serialized_size(&self, _compress: Compress) -> usize {
+            crypto_kx::SecretKey::BYTES
+        }
+    }
+
+    impl CanonicalDeserialize for DecKey {
+        fn deserialize_with_mode<R: Read>(
+            mut reader: R,
+            _compress: Compress,
+            _validate: Validate,
+        ) -> Result<Self, SerializationError> {
+            let mut result = [0u8; crypto_kx::SecretKey::BYTES];
+            reader.read_exact(&mut result)?;
+            Ok(DecKey(crypto_kx::SecretKey::from(result)))
+        }
+    }
+    impl Valid for DecKey {
+        fn check(&self) -> Result<(), SerializationError> {
+            Ok(())
+        }
+    }
+
+    impl CanonicalSerialize for Nonce {
+        fn serialize_with_mode<W: Write>(
+            &self,
+            mut writer: W,
+            _compress: Compress,
+        ) -> Result<(), SerializationError> {
+            writer.write_all(self.0.as_slice())?;
+            Ok(())
+        }
+        fn serialized_size(&self, _compress: Compress) -> usize {
+            // see <https://docs.rs/chacha20poly1305/0.10.1/chacha20poly1305/type.XNonce.html>
+            24
+        }
+    }
+
+    impl CanonicalDeserialize for Nonce {
+        fn deserialize_with_mode<R: Read>(
+            mut reader: R,
+            _compress: Compress,
+            _validate: Validate,
+        ) -> Result<Self, SerializationError> {
+            let mut result = [0u8; 24];
+            reader.read_exact(&mut result)?;
+            Ok(Nonce(XNonce::from(result)))
+        }
+    }
+    impl Valid for Nonce {
+        fn check(&self) -> Result<(), SerializationError> {
+            Ok(())
+        }
+    }
+}

 #[cfg(test)]
 mod test {
     use super::*;
@@ -322,4 +439,30 @@ mod test {
         // wrong byte length
         assert!(bincode::deserialize::<Ciphertext>(&bytes[1..]).is_err());
     }

+    #[test]
+    fn test_canonical_serde() {
+        let mut rng = jf_utils::test_rng();
+        let keypair = KeyPair::generate(&mut rng);
+        let msg = b"The quick brown fox jumps over the lazy dog".to_vec();
+        let aad = b"my associated data".to_vec();
+        let ciphertext = keypair.enc_key.encrypt(&mut rng, &msg, &aad).unwrap();
+
+        // serde-testing the keypair also covers its pk and sk
+        let mut bytes = Vec::new();
+        CanonicalSerialize::serialize_compressed(&keypair, &mut bytes).unwrap();
+        assert_eq!(
+            keypair,
+            KeyPair::deserialize_compressed(&bytes[..]).unwrap()
+        );
+        assert!(KeyPair::deserialize_compressed(&bytes[1..]).is_err());
+
+        let mut bytes = Vec::new();
+        CanonicalSerialize::serialize_compressed(&ciphertext, &mut bytes).unwrap();
+        assert_eq!(
+            ciphertext,
+            Ciphertext::deserialize_compressed(&bytes[..]).unwrap()
+        );
+        assert!(Ciphertext::deserialize_compressed(&bytes[1..]).is_err());
+    }
 }
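With these derives in place, the AEAD types round-trip through arkworks' canonical encoding. Below is a minimal sketch of that round trip (a hypothetical helper function, assuming `jf-primitives` and `jf-utils` as dependencies; it uses only APIs exercised by the test above):

    use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError};
    use jf_primitives::aead::KeyPair;

    fn keypair_roundtrip() -> Result<(), SerializationError> {
        let mut rng = jf_utils::test_rng();
        let keypair = KeyPair::generate(&mut rng);

        // Serialize with the newly derived `CanonicalSerialize` impl...
        let mut bytes = Vec::new();
        keypair.serialize_compressed(&mut bytes)?;

        // ...then recover an equal keypair from those bytes.
        let recovered = KeyPair::deserialize_compressed(&bytes[..])?;
        assert_eq!(keypair, recovered);
        Ok(())
    }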
31 changes: 19 additions & 12 deletions primitives/src/merkle_tree/examples.rs
@@ -8,12 +8,16 @@
 //! E.g. Sparse merkle tree with BigUInt index.
 use super::{append_only::MerkleTree, prelude::RescueHash, DigestAlgorithm, Element, Index};
-use crate::rescue::{sponge::RescueCRHF, RescueParameter};
+use crate::{
+    errors::PrimitivesError,
+    rescue::{sponge::RescueCRHF, RescueParameter},
+};
 use ark_ff::Field;
 use ark_serialize::{
     CanonicalDeserialize, CanonicalSerialize, Compress, Read, SerializationError, Valid, Validate,
     Write,
 };
+use ark_std::vec::Vec;
 use sha3::{Digest, Sha3_256};
 use typenum::U3;

@@ -23,13 +27,13 @@ pub struct Interval<F: Field>(pub F, pub F);
 // impl<F: Field> Element for Interval<F> {}

 impl<F: RescueParameter> DigestAlgorithm<Interval<F>, u64, F> for RescueHash<F> {
-    fn digest(data: &[F]) -> F {
-        RescueCRHF::<F>::sponge_no_padding(data, 1).unwrap()[0]
+    fn digest(data: &[F]) -> Result<F, PrimitivesError> {
+        Ok(RescueCRHF::<F>::sponge_no_padding(data, 1)?[0])
     }

-    fn digest_leaf(pos: &u64, elem: &Interval<F>) -> F {
+    fn digest_leaf(pos: &u64, elem: &Interval<F>) -> Result<F, PrimitivesError> {
         let data = [F::from(*pos), elem.0, elem.1];
-        RescueCRHF::<F>::sponge_no_padding(&data, 1).unwrap()[0]
+        Ok(RescueCRHF::<F>::sponge_no_padding(&data, 1)?[0])
     }
 }

Expand All @@ -39,7 +43,7 @@ pub type IntervalMerkleTree<F> = MerkleTree<Interval<F>, RescueHash<F>, u64, U3,

 /// Update the array length here
 #[derive(Default, Eq, PartialEq, Clone, Copy, Debug, Ord, PartialOrd, Hash)]
-pub struct Sha3Node([u8; 32]);
+pub struct Sha3Node(pub(crate) [u8; 32]);

 impl AsRef<[u8]> for Sha3Node {
     fn as_ref(&self) -> &[u8] {
@@ -82,18 +86,21 @@ impl Valid for Sha3Node {
 /// Wrapper for SHA3_512 hash function
 pub struct Sha3Digest();

-impl<E: Element, I: Index> DigestAlgorithm<E, I, Sha3Node> for Sha3Digest {
-    fn digest(data: &[Sha3Node]) -> Sha3Node {
+impl<E: Element + CanonicalSerialize, I: Index> DigestAlgorithm<E, I, Sha3Node> for Sha3Digest {
+    fn digest(data: &[Sha3Node]) -> Result<Sha3Node, PrimitivesError> {
         let mut hasher = Sha3_256::new();
         for value in data {
             hasher.update(value);
         }
-        Sha3Node(hasher.finalize().into())
+        Ok(Sha3Node(hasher.finalize().into()))
     }

-    fn digest_leaf(_pos: &I, _elem: &E) -> Sha3Node {
-        // Serialize and hash
-        todo!()
+    fn digest_leaf(_pos: &I, elem: &E) -> Result<Sha3Node, PrimitivesError> {
+        let mut writer = Vec::new();
+        elem.serialize_compressed(&mut writer).unwrap();
+        let mut hasher = Sha3_256::new();
+        hasher.update(writer);
+        Ok(Sha3Node(hasher.finalize().into()))
     }
 }

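To see how the fallible `DigestAlgorithm` reads at a call site, here is a minimal sketch (a hypothetical function; it assumes `ark-bls12-381` as a dependency, whose scalar field `Fr` implements `RescueParameter` in this repo). The `?` operator now propagates sponge errors that the old API swallowed with an internal `unwrap()`:

    use ark_bls12_381::Fr;
    use jf_primitives::{
        errors::PrimitivesError,
        merkle_tree::{examples::Interval, prelude::RescueHash, DigestAlgorithm},
    };

    fn hash_interval_leaf() -> Result<Fr, PrimitivesError> {
        let leaf = Interval(Fr::from(2u64), Fr::from(3u64));
        // Hash the leaf at position 0; errors bubble up instead of panicking.
        let node =
            <RescueHash<Fr> as DigestAlgorithm<Interval<Fr>, u64, Fr>>::digest_leaf(&0u64, &leaf)?;
        Ok(node)
    }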
16 changes: 8 additions & 8 deletions primitives/src/merkle_tree/hasher.rs
@@ -35,6 +35,8 @@
 //! Use [`GenericHasherMerkleTree`] if you prefer to specify your own `Arity`
 //! and node [`Index`] types.
+use crate::errors::PrimitivesError;
+
 use super::{append_only::MerkleTree, DigestAlgorithm, Element, Index};
 use ark_serialize::{
     CanonicalDeserialize, CanonicalSerialize, Compress, Read, SerializationError, Valid, Validate,
@@ -75,21 +77,19 @@ where
     H: Digest + Write,
     <<H as OutputSizeUser>::OutputSize as ArrayLength<u8>>::ArrayType: Copy,
 {
-    fn digest(data: &[HasherNode<H>]) -> HasherNode<H> {
+    fn digest(data: &[HasherNode<H>]) -> Result<HasherNode<H>, PrimitivesError> {
         let mut hasher = H::new();
         for value in data {
             hasher.update(value.as_ref());
         }
-        HasherNode(hasher.finalize())
+        Ok(HasherNode(hasher.finalize()))
     }

-    fn digest_leaf(pos: &I, elem: &E) -> HasherNode<H> {
+    fn digest_leaf(pos: &I, elem: &E) -> Result<HasherNode<H>, PrimitivesError> {
         let mut hasher = H::new();
-        pos.serialize_uncompressed(&mut hasher)
-            .expect("serialize should succeed");
-        elem.serialize_uncompressed(&mut hasher)
-            .expect("serialize should succeed");
-        HasherNode(hasher.finalize())
+        pos.serialize_uncompressed(&mut hasher)?;
+        elem.serialize_uncompressed(&mut hasher)?;
+        Ok(HasherNode(hasher.finalize()))
     }
 }

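For context, this is how the hasher-based trees are typically consumed. A minimal sketch (a hypothetical function; it assumes the `MerkleTreeScheme::from_elems(height, elems)` constructor exposed by this version of the crate, and reuses the `sha3` crate seen in `examples.rs`):

    use jf_primitives::{
        errors::PrimitivesError,
        merkle_tree::{hasher::HasherMerkleTree, MerkleTreeScheme},
    };
    use sha3::Sha3_256;

    fn build_tree() -> Result<(), PrimitivesError> {
        let data: Vec<u64> = (0u64..5).collect();
        // A height-2, arity-3 tree over SHA3-256. Leaf hashing now returns
        // `Result`, so construction surfaces serialization errors instead of
        // panicking inside `digest_leaf`.
        let _tree = HasherMerkleTree::<Sha3_256, u64>::from_elems(2, &data)?;
        Ok(())
    }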