Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

WIP: Try again: Run unit tests in debug mode (#1238) #1273

Closed
wants to merge 8 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 9 additions & 1 deletion .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,10 +18,18 @@ jobs:
if: ${{ matrix.os }} == 'ubuntu-latest'
run: |
echo "Pre cleanup"
echo "Listing 100 largest packages"
dpkg-query -Wf '${Installed-Size}\t${Package}\n' | sort -n | tail -n 100
df -h
sudo apt-get remove -y '^dotnet-.*'
sudo apt-get remove -y azure-cli google-cloud-sdk microsoft-edge-stable google-chrome-stable firefox powershell mono-devel
sudo apt-get autoremove -y
sudo apt-get clean
df -h
echo "Removing large directories"
rm -rf /usr/share/dotnet/
sudo rm -rf "/usr/local/share/boost"
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
echo "Post cleanup"
df -h
sudo apt-get install protobuf-compiler
- name: Check out code
Expand Down
11 changes: 7 additions & 4 deletions ci/script.sh
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,9 @@ SRTOOL_VERSION="${SRTOOL_VERSION:-1.66.1-0.9.25}"
PACKAGE="${PACKAGE:-centrifuge-runtime}" # Need to replicate job for all runtimes
RUNTIME="${RUNTIME:-centrifuge}"

# Reusing the same features for different builds will save storage in CI jobs
FEATURES="--features=runtime-benchmarks,try-runtime,fast-runtime"

# Enable warnings about unused extern crates
export RUSTFLAGS=" -W unused-extern-crates"

Expand Down Expand Up @@ -37,11 +40,11 @@ case $TARGET in
;;

tests)
RUST_MIN_STACK=8388608 cargo test --workspace --release --features runtime-benchmarks,try-runtime --exclude runtime-integration-tests
cargo test --workspace $FEATURES --exclude runtime-integration-tests
;;

integration)
RUST_MIN_STACK=8388608 cargo test --release --package runtime-integration-tests --features fast-runtime
cargo test --package runtime-integration-tests $FEATURES
;;

fmt)
Expand All @@ -53,14 +56,14 @@ case $TARGET in
;;

clippy)
cargo clippy --workspace -- -D warnings -A clippy::unnecessary-cast -A clippy::bool-to-int-with-if
cargo clippy --workspace $FEATURES -- -D warnings -A clippy::unnecessary-cast -A clippy::bool-to-int-with-if
;;

benchmark)
./scripts/runtime_benchmarks.sh $RUNTIME
;;

benchmark-check)
./scripts/check_benchmarks.sh $RUNTIME
./scripts/check_benchmarks.sh $RUNTIME debug $FEATURES

esac
4 changes: 2 additions & 2 deletions docs/DEVELOPING.md
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ and another one to verify how it works in a more real environment as a parachain
The following command will run the unit and integration tests:

```bash
cargo +nightly test --workspace --release --features runtime-benchmarks,try-runtime
cargo test --workspace --features runtime-benchmarks,try-runtime
```

### Environment tests
Expand Down Expand Up @@ -84,7 +84,7 @@ You can play with it from the parachain client, make transfers, inspect events,
## Linting

### Source code
Lint the source code with `cargo +nightly fmt`. This excludes certain paths (defined in `rustfmt.toml`) that we want to stay as close as possible to `paritytech/substrate` to simplify upgrading to new releases.
Lint the source code with `cargo fmt`. This excludes certain paths (defined in `rustfmt.toml`) that we want to stay as close as possible to `paritytech/substrate` to simplify upgrading to new releases.

### Cargo.toml files
1. Install [taplo](https://github.com/tamasfe/taplo) with `cargo install taplo-cli`.
Expand Down
1 change: 1 addition & 0 deletions pallets/block-rewards/src/benchmarking.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ use sp_runtime::traits::{One, Zero};
use super::*;
use crate::{pallet::Config, Pallet as BlockRewards};

#[allow(clippy::identity_op)]
const REWARD: u128 = 1 * CFG;
const SEED: u32 = 0;

Expand Down
6 changes: 3 additions & 3 deletions pallets/crowdloan-claim/src/benchmarking.rs
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ benchmarks! {
get_contribution::<T>(CONTRIBUTION)
);
let locked_at: T::BlockNumber = 1u32.into();
let index: TrieIndex = 1u32.into();
let index: TrieIndex = 1u32;
let lease_start: T::BlockNumber = 1u32.into();
let lease_period: T::BlockNumber = 1u32.into();
}: _(RawOrigin::Root, contributions, locked_at, index, lease_start, lease_period)
Expand Down Expand Up @@ -117,7 +117,7 @@ benchmarks! {
}

set_crowdloan_trie_index {
let index: TrieIndex = 1u32.into();
let index: TrieIndex = 1u32;
}: _(RawOrigin::Root, index)
verify {
assert!(Pallet::<T>::crowdloan_trie_index().is_some());
Expand Down Expand Up @@ -337,7 +337,7 @@ fn get_root<T: Config>(relay: T::RelayChainAccountId, contribution: T::Balance)
let node_01 = proofs::hashing::sort_hash_of::<ProofVerifier<T>>(node_2, node_3);
let node_000 = proofs::hashing::sort_hash_of::<ProofVerifier<T>>(node_00, node_01);

proofs::hashing::sort_hash_of::<ProofVerifier<T>>(node_000, node_4).into()
proofs::hashing::sort_hash_of::<ProofVerifier<T>>(node_000, node_4)
}

fn init_pallets<T: Config>(relay_account: T::RelayChainAccountId) {
Expand Down
14 changes: 7 additions & 7 deletions pallets/keystore/src/benchmarking.rs
Original file line number Diff line number Diff line change
Expand Up @@ -31,8 +31,8 @@ benchmarks! {
let n in 1..T::MaxKeys::get();
let caller: T::AccountId = account("acc_0", 0, 0);
let test_keys: Vec<AddKey<T::Hash>> = build_test_keys::<T>(n);
T::Currency::deposit_creating(&caller.clone().into(), T::DefaultKeyDeposit::get() * n as u128);
let origin = RawOrigin::Signed(caller.clone());
T::Currency::deposit_creating(&caller, T::DefaultKeyDeposit::get() * n as u128);
let origin = RawOrigin::Signed(caller);
}: add_keys(origin, test_keys)
verify {
assert_eq!(Keys::<T>::iter().collect::<Vec<_>>().len() as u32, n);
Expand All @@ -50,7 +50,7 @@ benchmarks! {
}: revoke_keys(origin, key_hashes, KeyPurpose::P2PDiscovery)
verify {
assert_eq!(Keys::<T>::iter().collect::<Vec<_>>().len() as u32, n);
assert!(all_keys_are_revoked::<T>(caller.clone()));
assert!(all_keys_are_revoked::<T>(caller));
}

set_deposit {
Expand All @@ -63,7 +63,7 @@ benchmarks! {

fn all_keys_are_revoked<T: Config>(account_id: T::AccountId) -> bool {
for (_, key) in Keys::<T>::iter_prefix(account_id) {
if let None = key.revoked_at {
if key.revoked_at.is_none() {
return false;
}
}
Expand All @@ -73,7 +73,7 @@ fn all_keys_are_revoked<T: Config>(account_id: T::AccountId) -> bool {

fn add_keys_to_storage<T: Config>(account_id: T::AccountId, keys: Vec<AddKey<T::Hash>>) {
for key in keys.iter() {
let key_id: KeyId<T::Hash> = (key.key.clone(), key.purpose.clone());
let key_id: KeyId<T::Hash> = (key.key, key.purpose.clone());

Keys::<T>::insert(
account_id.clone(),
Expand All @@ -92,7 +92,7 @@ fn build_test_keys<T: Config>(n: u32) -> Vec<AddKey<T::Hash>> {
let mut keys: Vec<AddKey<T::Hash>> = Vec::new();

for i in 0..n {
let hash = format!("some_hash_{}", i);
let hash = format!("some_hash_{i}");

let key_hash = T::Hashing::hash(hash.as_bytes());

Expand All @@ -103,7 +103,7 @@ fn build_test_keys<T: Config>(n: u32) -> Vec<AddKey<T::Hash>> {
});
}

return keys;
keys
}

impl_benchmark_test_suite!(Pallet, crate::mock::new_test_ext(), crate::mock::Runtime,);
18 changes: 9 additions & 9 deletions pallets/migration/src/benchmarking.rs
Original file line number Diff line number Diff line change
Expand Up @@ -32,8 +32,8 @@ benchmarks! {
let mut i = 0;
for account in &test_data::system_account::SYSTEM_ACCOUNT {
i += 1;
let key = account.key.iter().cloned().collect();
let value = account.value.iter().cloned().collect();
let key = account.key.to_vec();
let value = account.value.to_vec();

data.push((key, value));

Expand All @@ -57,13 +57,13 @@ benchmarks! {
let additional_issuance: <T as pallet_balances::Config>::Balance =
codec::Decode::decode(&mut test_data::balances_total_issuance::TOTAL_ISSUANCE.value[..].as_ref()).unwrap();

let old_issuance: <T as pallet_balances::Config>::Balance = pallet_balances::Pallet::<T>::total_issuance().into();
let old_issuance: <T as pallet_balances::Config>::Balance = pallet_balances::Pallet::<T>::total_issuance();

}: _(RawOrigin::Root, additional_issuance.clone())
}: _(RawOrigin::Root, additional_issuance)
verify {
assert_eq!(
additional_issuance + old_issuance,
pallet_balances::Pallet::<T>::total_issuance().into()
pallet_balances::Pallet::<T>::total_issuance()
);
}
migrate_vesting_vesting{
Expand All @@ -77,7 +77,7 @@ benchmarks! {
let mut i = 0;
for vesting in &test_data::vesting_vesting::VESTING_VESTING {
i += 1;
let key: Vec<u8> = vesting.key.iter().cloned().collect();
let key: Vec<u8> = vesting.key.to_vec();
let vesting: VestingInfo<<<T as pallet_vesting::Config>::Currency as Currency<<T as frame_system::Config>::AccountId>>::Balance,
T::BlockNumber> =
codec::Decode::decode(&mut vesting.value[..].as_ref()).unwrap();
Expand All @@ -91,7 +91,7 @@ benchmarks! {
).as_slice()
).unwrap();

data.push((account_id.into(), vesting));
data.push((account_id, vesting));

if i == n {
break;
Expand All @@ -103,7 +103,7 @@ benchmarks! {
for ( id, vesting_info) in data {
let storage_vesting_info: VestingInfo<<<T as pallet_vesting::Config>::Currency as Currency<<T as frame_system::Config>::AccountId>>::Balance,
T::BlockNumber> =
pallet_vesting::Vesting::<T>::get(id).unwrap().first().unwrap().clone();
*pallet_vesting::Vesting::<T>::get(id).unwrap().first().unwrap();

assert_eq!(vesting_info, storage_vesting_info);
}
Expand Down Expand Up @@ -136,7 +136,7 @@ benchmarks! {
let mut i = 0;
for proxy in proxies {
i += 1;
let key: Vec<u8> = proxy.key.iter().cloned().collect();
let key: Vec<u8> = proxy.key.to_vec();
let proxy_info: (
BoundedVec<
ProxyDefinition<T::AccountId, T::ProxyType, T::BlockNumber>,
Expand Down
12 changes: 6 additions & 6 deletions pallets/nft-sales/src/benchmarking.rs
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ benchmarks! {
// Add an NFT for sale
add {
let seller_account = account::<T::AccountId>("seller", 0, 0);
let seller_origin: RawOrigin<T::AccountId> = RawOrigin::Signed(seller_account.clone()).into();
let seller_origin: RawOrigin<T::AccountId> = RawOrigin::Signed(seller_account.clone());
deposit_native_balance::<T>(&seller_account);

// We need the NFT to exist in the pallet-uniques before we can put it for sale
Expand All @@ -47,7 +47,7 @@ benchmarks! {
// Remove an NFT from sale
remove {
let seller_account = account::<T::AccountId>("seller", 0, 0);
let seller_origin: RawOrigin<T::AccountId> = RawOrigin::Signed(seller_account.clone()).into();
let seller_origin: RawOrigin<T::AccountId> = RawOrigin::Signed(seller_account.clone());
deposit_native_balance::<T>(&seller_account);

// We need the NFT to exist in the pallet-uniques before we can put it for sale
Expand All @@ -56,7 +56,7 @@ benchmarks! {
let price: Price<crate::CurrencyOf<T>, crate::BalanceOf<T>> = Price { currency: AUSD_CURRENCY_ID.into(), amount: 10_000u128.into() };

// We need the nft in the storage beforehand to be able to remove it
<Sales<T>>::insert(collection_id.clone(), item_id.clone(), Sale { seller: seller_account, price});
<Sales<T>>::insert(collection_id, item_id, Sale { seller: seller_account, price});

}: _(seller_origin, collection_id, item_id)
verify {
Expand All @@ -66,7 +66,7 @@ benchmarks! {
// Remove an NFT from sale
buy {
let seller_account = account::<T::AccountId>("seller", 0, 0);
let seller_origin: RawOrigin<T::AccountId> = RawOrigin::Signed(seller_account.clone()).into();
let seller_origin: RawOrigin<T::AccountId> = RawOrigin::Signed(seller_account.clone());
deposit_native_balance::<T>(&seller_account);

// We need the NFT to exist in the pallet-uniques before we can put it for sale
Expand All @@ -75,11 +75,11 @@ benchmarks! {
let price: Price<crate::CurrencyOf<T>, crate::BalanceOf<T>> = Price { currency: AUSD_CURRENCY_ID.into(), amount: 10_000u128.into() };

// We need the nft in the storage beforehand to be able to remove it
<Sales<T>>::insert(collection_id.clone(), item_id.clone(), Sale { seller: seller_account, price: price.clone()});
<Sales<T>>::insert(collection_id, item_id, Sale { seller: seller_account, price: price.clone()});

// We need the buyer to have enough balance to pay for the NFT
let buyer_account = account::<T::AccountId>("buyer", 0, 0);
let buyer_origin: RawOrigin<T::AccountId> = RawOrigin::Signed(buyer_account.clone()).into();
let buyer_origin: RawOrigin<T::AccountId> = RawOrigin::Signed(buyer_account.clone());
deposit_token_balance::<T>(&buyer_account, AUSD_CURRENCY_ID, 100_000u128.into());

}: _(buyer_origin, collection_id, item_id, price)
Expand Down
3 changes: 2 additions & 1 deletion pallets/order-book/src/benchmarking.rs
Original file line number Diff line number Diff line change
Expand Up @@ -56,11 +56,12 @@ benchmarks! {
fill_order_full {
let (account_0, account_1, asset_0, asset_1) = set_up_users_currencies::<T>()?;

let order_id = Pallet::<T>::place_order(account_0.clone(), asset_0, asset_1, 100u32.into(), 10u32.into(), 100u32.into())?;
let order_id = Pallet::<T>::place_order(account_0, asset_0, asset_1, 100u32.into(), 10u32.into(), 100u32.into())?;

}:fill_order_full(RawOrigin::Signed(account_1.clone()), order_id)
}

#[allow(clippy::type_complexity)]
fn set_up_users_currencies<T: Config<AssetCurrencyId = CurrencyId, ForeignCurrencyBalance = u128>>(
) -> Result<
(
Expand Down
16 changes: 8 additions & 8 deletions pallets/permissions/src/benchmarking.rs
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ benchmarks! {
let with_role = T::Role::editor();
let role = T::Role::editor();
let pool_id: T::Scope = Default::default();
}:add(RawOrigin::Root, with_role.clone(), acc.clone(), pool_id.clone(), role.clone())
}:add(RawOrigin::Root, with_role, acc.clone(), pool_id.clone(), role.clone())
verify {
assert!(<PermissionsPallet::<T> as TPermissions<T::AccountId>>::has(pool_id, acc, role));
}
Expand All @@ -59,12 +59,12 @@ benchmarks! {
let pool_id: T::Scope = Default::default();
let res = PermissionsPallet::<T>::add(RawOrigin::Root.into(), with_role.clone(), acc.clone(), pool_id.clone(), role.clone());
assert_ok!(res);
assert!(<PermissionsPallet::<T> as TPermissions<T::AccountId>>::has(pool_id.clone(), acc.clone(), role.clone()));
assert!(<PermissionsPallet::<T> as TPermissions<T::AccountId>>::has(pool_id.clone(), acc.clone(), role));

// setup borrower through pool admin
let acc2 = admin::<T>(1);
let role = T::Role::user();
}:add(RawOrigin::Signed(acc.clone()), with_role.clone(), acc2.clone(), pool_id.clone(), role.clone())
}:add(RawOrigin::Signed(acc.clone()), with_role, acc2.clone(), pool_id.clone(), role.clone())
verify {
assert!(<PermissionsPallet::<T> as TPermissions<T::AccountId>>::has(pool_id, acc2, role));
}
Expand All @@ -78,7 +78,7 @@ benchmarks! {
let res = PermissionsPallet::<T>::add(RawOrigin::Root.into(), with_role.clone(), acc.clone(), pool_id.clone(), role.clone());
assert_ok!(res);
assert!(<PermissionsPallet::<T> as TPermissions<T::AccountId>>::has(pool_id.clone(), acc.clone(), role.clone()));
}:remove(RawOrigin::Root, with_role.clone(), acc.clone(), pool_id.clone(), role.clone())
}:remove(RawOrigin::Root, with_role, acc.clone(), pool_id.clone(), role.clone())
verify {
assert!(!<PermissionsPallet::<T> as TPermissions<T::AccountId>>::has(pool_id, acc, role));
}
Expand All @@ -91,15 +91,15 @@ benchmarks! {
let pool_id: T::Scope = Default::default();
let res = PermissionsPallet::<T>::add(RawOrigin::Root.into(), with_role.clone(), acc.clone(), pool_id.clone(), role.clone());
assert_ok!(res);
assert!(<PermissionsPallet::<T> as TPermissions<T::AccountId>>::has(pool_id.clone(), acc.clone(), role.clone()));
assert!(<PermissionsPallet::<T> as TPermissions<T::AccountId>>::has(pool_id.clone(), acc.clone(), role));

// setup borrower through pool admin
let acc2 = admin::<T>(1);
let role = T::Role::user();
let res = PermissionsPallet::<T>::add(RawOrigin::Signed(acc.clone()).into(), with_role.clone(), acc2.clone(), pool_id.clone(), role.clone());
assert_ok!(res);
assert!(<PermissionsPallet::<T> as TPermissions<T::AccountId>>::has(pool_id.clone(), acc2.clone(), role.clone()));
}:remove(RawOrigin::Signed(acc), with_role.clone(), acc2.clone(), pool_id.clone(), role.clone())
}:remove(RawOrigin::Signed(acc), with_role, acc2.clone(), pool_id.clone(), role.clone())
verify {
assert!(!<PermissionsPallet::<T> as TPermissions<T::AccountId>>::has(pool_id, acc2, role));
}
Expand All @@ -110,7 +110,7 @@ benchmarks! {
let with_role = T::Role::editor();
let role = T::Role::editor();
let pool_id: T::Scope = Default::default();
let res = PermissionsPallet::<T>::add(RawOrigin::Root.into(), with_role.clone(), acc.clone(), pool_id.clone(), role.clone());
let res = PermissionsPallet::<T>::add(RawOrigin::Root.into(), with_role, acc.clone(), pool_id.clone(), role.clone());
assert_ok!(res);
assert!(<PermissionsPallet::<T> as TPermissions<T::AccountId>>::has(pool_id.clone(), acc.clone(), role.clone()));
}:_(RawOrigin::Signed(acc.clone()), pool_id.clone())
Expand All @@ -124,7 +124,7 @@ benchmarks! {
let with_role = T::Role::editor();
let role = T::Role::editor();
let pool_id: T::Scope = Default::default();
let res = PermissionsPallet::<T>::add(RawOrigin::Root.into(), with_role.clone(), acc.clone(), pool_id.clone(), role.clone());
let res = PermissionsPallet::<T>::add(RawOrigin::Root.into(), with_role, acc.clone(), pool_id.clone(), role.clone());
assert_ok!(res);
assert!(<PermissionsPallet::<T> as TPermissions<T::AccountId>>::has(pool_id.clone(), acc.clone(), role.clone()));
}:_(RawOrigin::Root, acc.clone(), pool_id.clone())
Expand Down
Loading