Skip to content

Commit

Permalink
Move max-cut warning so it will only appear once.
Browse files Browse the repository at this point in the history
  • Loading branch information
elad-c committed Jan 20, 2025
1 parent d73b4a7 commit a6dc23b
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 1 deletion.
Original file line number Diff line number Diff line change
Expand Up @@ -170,7 +170,6 @@ def compute_resource_utilization(self,
w_total, *_ = self.compute_weights_utilization(target_criterion, bitwidth_mode, w_qcs)

if {RUTarget.ACTIVATION, RUTarget.TOTAL}.intersection(ru_targets):
Logger.warning("Using an experimental feature max-cut for activation memory utilization estimation.")
a_total = self.compute_activations_utilization(target_criterion, bitwidth_mode, act_qcs)

ru = ResourceUtilization()
Expand Down
2 changes: 2 additions & 0 deletions model_compression_toolkit/core/runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,8 @@ def core_runner(in_model: Any,
if core_config.mixed_precision_config is None:
Logger.critical("Provided an initialized target_resource_utilization, that means that mixed precision quantization is "
"enabled, but the provided MixedPrecisionQuantizationConfig is None.")
if target_resource_utilization.activation_restricted() or target_resource_utilization.total_mem_restricted():
Logger.warning("Using an experimental feature max-cut for activation memory utilization estimation.")
# Determine whether to use mixed precision or single precision based on target_resource_utilization.
if requires_mixed_precision(in_model,
target_resource_utilization,
Expand Down

0 comments on commit a6dc23b

Please sign in to comment.