
Commit

round if linear or scaled_linear
hlky committed Feb 7, 2025
1 parent acbe140 commit 6544e47
Showing 1 changed file with 4 additions and 0 deletions.
src/diffusers/schedulers/scheduling_dpmsolver_multistep.py: 4 additions & 0 deletions
@@ -400,11 +400,15 @@ def set_timesteps(
             sigmas = np.flip(sigmas).copy()
             sigmas = self._convert_to_karras(in_sigmas=sigmas, num_inference_steps=num_inference_steps)
             timesteps = np.array([self._sigma_to_t(sigma, log_sigmas) for sigma in sigmas])
+            if self.config.beta_schedule in {"linear", "scaled_linear"}:
+                timesteps = timesteps.round()
         elif self.config.use_lu_lambdas:
             lambdas = np.flip(log_sigmas.copy())
             lambdas = self._convert_to_lu(in_lambdas=lambdas, num_inference_steps=num_inference_steps)
             sigmas = np.exp(lambdas)
             timesteps = np.array([self._sigma_to_t(sigma, log_sigmas) for sigma in sigmas])
+            if self.config.beta_schedule in {"linear", "scaled_linear"}:
+                timesteps = timesteps.round()
         elif self.config.use_exponential_sigmas:
             sigmas = np.flip(sigmas).copy()
             sigmas = self._convert_to_exponential(in_sigmas=sigmas, num_inference_steps=num_inference_steps)
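A minimal sketch (not part of the commit) of what the added lines do: for "linear" and "scaled_linear" beta schedules the scheduler was trained on an integer timestep grid, so the fractional values produced by interpolating sigma back to t are snapped to that grid. The sigma_to_t helper and every sigma value below are made-up stand-ins for illustration only, not the scheduler's actual internals.

import numpy as np

# Made-up stand-ins for the scheduler's internal tables.
trained_timesteps = np.arange(1000)            # integer training grid
log_sigmas = np.linspace(-4.0, 2.0, 1000)      # hypothetical log-sigma table

def sigma_to_t(sigma, log_sigmas):
    # Linear interpolation into the log-sigma table yields fractional timesteps.
    return np.interp(np.log(sigma), log_sigmas, trained_timesteps)

karras_style_sigmas = np.exp(np.linspace(1.5, -3.5, 10))   # made-up sigmas
timesteps = np.array([sigma_to_t(s, log_sigmas) for s in karras_style_sigmas])

beta_schedule = "scaled_linear"
if beta_schedule in {"linear", "scaled_linear"}:
    timesteps = timesteps.round()              # snap back to the integer grid, mirroring the added lines

print(timesteps)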

