
Commit 2970e15

fix clippy
Cifko committed Mar 16, 2022
1 parent 0743f83 commit 2970e15
Showing 6 changed files with 25 additions and 17 deletions.
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions applications/tari_validator_node/Cargo.toml
@@ -32,6 +32,7 @@ blake2 = "0.9.2"
 clap = "2.33.3"
 digest = "0.9.0"
 futures = { version = "^0.3.1" }
+itertools = "0.10.3"
 log = { version = "0.4.8", features = ["std"] }
 lmdb-zero = "0.4.4"
 prost = "0.9"
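The new itertools dependency backs the Itertools::contains call this commit introduces in dan_node.rs; a sketch of that pattern follows the dan_node.rs hunks below.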
11 changes: 7 additions & 4 deletions applications/tari_validator_node/src/asset.rs
@@ -22,7 +22,10 @@
 
 use std::{
     collections::HashMap,
-    sync::{Arc, Mutex},
+    sync::{
+        atomic::{AtomicBool, Ordering},
+        Arc,
+    },
 };
 
 use tari_dan_core::models::AssetDefinition;
@@ -34,7 +37,7 @@ pub struct Asset {
     // Changes in the committee for this asset.
     // Mined height of the change TXs, and the involvement in the committee (true = part of committee)
     next_states: HashMap<u64, bool>,
-    kill_signal: Option<Arc<Mutex<bool>>>,
+    kill_signal: Option<Arc<AtomicBool>>,
 }
 
 impl Asset {
@@ -48,7 +51,7 @@ impl Asset {
     }
 
     pub fn update_height<Fstart>(&mut self, height: u64, start: Fstart)
-    where Fstart: Fn(AssetDefinition) -> Arc<Mutex<bool>> {
+    where Fstart: Fn(AssetDefinition) -> Arc<AtomicBool> {
         if let Some((&height, &involment)) = self
             .next_states
             .iter()
@@ -61,7 +64,7 @@
         } else {
             // Switch on the kill signal for the asset to end processing
             let stop = self.kill_signal.clone().unwrap();
-            *stop.as_ref().lock().unwrap() = true;
+            stop.as_ref().store(true, Ordering::Relaxed);
             self.kill_signal = None;
         }
     }
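The change above is the pattern clippy's mutex_atomic lint targets: a Mutex<bool> that is only ever read or overwritten as a whole can be an AtomicBool, which needs no lock() call and cannot be poisoned. A minimal before/after sketch (illustrative names, not the Tari code):

    use std::sync::{
        atomic::{AtomicBool, Ordering},
        Arc, Mutex,
    };

    fn main() {
        // Before: a whole mutex (and its poison handling) guards one bool.
        let kill_mutex: Arc<Mutex<bool>> = Arc::new(Mutex::new(false));
        *kill_mutex.lock().unwrap() = true;

        // After: a lock-free flag; store() cannot block, fail, or poison.
        let kill: Arc<AtomicBool> = Arc::new(AtomicBool::new(false));
        kill.store(true, Ordering::Relaxed);

        assert!(*kill_mutex.lock().unwrap());
        assert!(kill.load(Ordering::Relaxed));
    }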
12 changes: 6 additions & 6 deletions applications/tari_validator_node/src/dan_node.rs
@@ -21,10 +21,11 @@
 // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 use std::{
-    sync::{Arc, Mutex},
+    sync::{atomic::AtomicBool, Arc},
     time::Duration,
 };
 
+use itertools::Itertools;
 use log::*;
 use tari_common::{
     configuration::ValidatorNodeConfig,
@@ -128,12 +129,11 @@ impl DanNode {
                     monitoring.add_if_unmonitored(asset.clone());
                     monitoring.add_state(asset.public_key, mined_height, true);
                 }
-                let known_active_public_keys: Vec<PublicKey> =
-                    assets.into_iter().map(|(asset, _)| asset.public_key).collect();
+                let mut known_active_public_keys = assets.into_iter().map(|(asset, _)| asset.public_key);
                 let active_public_keys = monitoring
                     .get_active_public_keys()
                     .into_iter()
-                    .map(|public_key| public_key.clone())
+                    .cloned()
                     .collect::<Vec<PublicKey>>();
                 for public_key in active_public_keys {
                     if !known_active_public_keys.contains(&public_key) {
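Two lints are addressed in this hunk: map_clone (.map(|x| x.clone()) becomes .cloned()) and the collect-then-search pattern, where keeping known_active_public_keys as a lazy iterator and calling Itertools::contains avoids building an intermediate Vec. A hedged sketch with toy types (not the Tari ones); note that Itertools::contains consumes items as it searches, so each lookup only sees what remains:

    use itertools::Itertools;

    fn main() {
        let assets = vec![("asset-a", 1u64), ("asset-b", 2)];

        // Lazy iterator over keys; nothing is collected up front.
        let mut known_keys = assets.into_iter().map(|(key, _)| key);

        let active: Vec<&str> = ["asset-b", "asset-c"]
            .iter()
            .cloned() // map_clone: prefer .cloned() over .map(|k| k.clone())
            .collect();

        for key in active {
            // Itertools::contains advances the iterator while searching, so
            // later lookups only see the items not yet consumed.
            if !known_keys.contains(&key) {
                println!("{} is active but unknown", key);
            }
        }
    }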
@@ -159,7 +159,7 @@
                     let subscription_factory = subscription_factory.clone();
                     let shutdown = shutdown.clone();
                     // Create a kill signal for each asset
-                    let kill = Arc::new(Mutex::new(false));
+                    let kill = Arc::new(AtomicBool::new(false));
                     let dan_config = dan_config.clone();
                     let db_factory = db_factory.clone();
                     task::spawn(DanNode::start_asset_worker(
@@ -189,7 +189,7 @@
         shutdown: ShutdownSignal,
         config: ValidatorNodeConfig,
         db_factory: SqliteDbFactory,
-        kill: Arc<Mutex<bool>>,
+        kill: Arc<AtomicBool>,
     ) -> Result<(), ExitError> {
         let timeout = Duration::from_secs(asset_definition.phase_timeout);
         let committee = asset_definition
8 changes: 4 additions & 4 deletions applications/tari_validator_node/src/monitoring.rs
@@ -22,7 +22,7 @@
 
 use std::{
     collections::HashMap,
-    sync::{Arc, Mutex},
+    sync::{atomic::AtomicBool, Arc},
 };
 
 use tari_common_types::types::PublicKey;
@@ -46,7 +46,7 @@ impl Monitoring {
 
     pub fn add_if_unmonitored(&mut self, asset: AssetDefinition) {
         if !self.assets.contains_key(&asset.public_key) {
-            self.assets.insert(asset.public_key.clone(), Asset::new(asset.clone()));
+            self.assets.insert(asset.public_key.clone(), Asset::new(asset));
         }
     }

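Dropping the .clone() is clippy's redundant_clone pattern: asset is owned and not used after the insert, so it can move into Asset::new directly. A tiny sketch under hypothetical types:

    use std::collections::HashMap;

    #[derive(Clone)]
    struct AssetDefinition {
        public_key: String,
    }

    struct Asset {
        definition: AssetDefinition,
    }

    impl Asset {
        fn new(definition: AssetDefinition) -> Self {
            Asset { definition }
        }
    }

    fn main() {
        let mut assets = HashMap::new();
        let asset = AssetDefinition { public_key: "pk".into() };
        // The key still needs a clone (the map owns a copy), but the value
        // takes `asset` by move: cloning it first would be redundant.
        assets.insert(asset.public_key.clone(), Asset::new(asset));
        if let Some(stored) = assets.get("pk") {
            println!("monitoring {}", stored.definition.public_key);
        }
    }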
@@ -59,8 +59,8 @@
     }
 
     pub fn update_height<Fstart: Clone>(&mut self, height: u64, start: Fstart)
-    where Fstart: Fn(AssetDefinition) -> Arc<Mutex<bool>> {
-        for (_, proc) in &mut self.assets {
+    where Fstart: Fn(AssetDefinition) -> Arc<AtomicBool> {
+        for proc in self.assets.values_mut() {
             proc.update_height(height, start.clone());
         }
         self.assets.retain(|_, proc| proc.should_monitor())
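The loop rewrite addresses clippy's for_kv_map lint: iterating &mut self.assets and discarding the key is clearer as values_mut(). A minimal sketch:

    use std::collections::HashMap;

    fn main() {
        let mut heights: HashMap<&str, u64> = HashMap::from([("a", 1), ("b", 5)]);

        // for_kv_map: `for (_, h) in &mut heights` discards the key;
        // values_mut() states the intent directly.
        for height in heights.values_mut() {
            *height += 1;
        }

        // retain() still sees both key and value, as in Monitoring above.
        heights.retain(|_, height| *height > 2);
        println!("{:?}", heights);
    }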
9 changes: 6 additions & 3 deletions dan_layer/core/src/workers/consensus_worker.rs
@@ -20,7 +20,10 @@
 // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
 // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-use std::sync::{Arc, Mutex};
+use std::sync::{
+    atomic::{AtomicBool, Ordering},
+    Arc,
+};
 
 use log::*;
 use tari_common_types::types::PublicKey;
@@ -117,7 +120,7 @@ impl<TSpecification: ServiceSpecification<Addr = PublicKey>> ConsensusWorker<TSp
         &mut self,
         shutdown: ShutdownSignal,
         max_views_to_process: Option<u64>,
-        stop: Arc<Mutex<bool>>,
+        stop: Arc<AtomicBool>,
     ) -> Result<(), DigitalAssetError> {
         let chain_db = self
             .db_factory
@@ -131,7 +134,7 @@
             "Consensus worker started for asset '{}'. Tip: {}", self.asset_definition.public_key, self.current_view_id
         );
         let starting_view = self.current_view_id;
-        while !*stop.as_ref().lock().unwrap() {
+        while !stop.as_ref().load(Ordering::Relaxed) {
             if let Some(max) = max_views_to_process {
                 if max <= self.current_view_id.0 - starting_view.0 {
                     break;
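Relaxed ordering is presumably sufficient here because the flag is a self-contained stop signal: the loop only needs the store to become visible eventually, not to order other memory accesses around it. If the setter also published data the worker had to observe after seeing the flag, a Release store paired with an Acquire load would be the usual choice. A small self-contained sketch of the polling loop:

    use std::{
        sync::{
            atomic::{AtomicBool, Ordering},
            Arc,
        },
        thread,
        time::Duration,
    };

    fn main() {
        let stop = Arc::new(AtomicBool::new(false));
        let worker_stop = stop.clone();

        let worker = thread::spawn(move || {
            let mut views = 0u64;
            // Relaxed: the flag itself is the only shared state relied on.
            while !worker_stop.load(Ordering::Relaxed) {
                views += 1;
                thread::yield_now();
            }
            views
        });

        thread::sleep(Duration::from_millis(20));
        // Would be Ordering::Release if other writes had to be visible
        // to the worker once it observes `stop == true`.
        stop.store(true, Ordering::Relaxed);
        println!("processed {} views before stopping", worker.join().unwrap());
    }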
