bg/fix_ck_guard_in_bn : fix CK guard around bn #2464

Merged 2 commits on Oct 18, 2023

Changes from all commits
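The common thread in the three files below is the preprocessor guard around the Composable Kernel (CK) code paths: backward_ck.cpp and forward_training_ck.cpp previously compiled the CK-specific checks when either MIOPEN_BACKEND_HIP or MIOPEN_USE_COMPOSABLEKERNEL was set, and forward_inference_ck.cpp used the negated early-return form of the same test; all three now use one positive guard that requires both. A minimal before/after sketch, distilled from the hunks below (the bodies are placeholders):

// Before (backward and forward-training solvers): enabled when either macro is nonzero,
// so a build without CK support could still reach CK-only code.
#if MIOPEN_BACKEND_HIP || MIOPEN_USE_COMPOSABLEKERNEL
    // CK-specific applicability checks and solution setup
#endif

// After (all three solvers): both the HIP backend and CK support are required.
#if MIOPEN_BACKEND_HIP && MIOPEN_USE_COMPOSABLEKERNEL
    // CK-specific applicability checks and solution setup
#endif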
69 changes: 34 additions & 35 deletions src/solver/batchnorm/backward_ck.cpp
@@ -154,48 +154,14 @@ static bool CheckCKApplicability(const miopen::batchnorm::ProblemDescription& pr
CKArgsBNormBwd>(problem);
}

#endif

bool BnCKBwdBackward::IsApplicable(
[[maybe_unused]] const ExecutionContext& context,
[[maybe_unused]] const miopen::batchnorm::ProblemDescription& bn_problem) const
{
#if MIOPEN_BACKEND_HIP || MIOPEN_USE_COMPOSABLEKERNEL
if(miopen::IsDisabled(MIOPEN_DEBUG_CONV_CK_BN_BACK{}))
return false;
if(!bn_problem.IsLayoutNHWC())
return false;
if(!ck_utility::is_ck_whitelist(context.GetStream()))
return false;
if(bn_problem.GetXDesc().GetType() != bn_problem.GetScaleBiasDiffDesc().GetType())
return false;

switch(bn_problem.GetXDesc().GetType())
{
case miopenFloat: return CheckCKApplicability<F32, F32, F32, F32, F32, F32, F32>(bn_problem);
case miopenDouble: return CheckCKApplicability<F64, F64, F64, F64, F64, F64, F64>(bn_problem);
case miopenHalf: return CheckCKApplicability<F16, F32, F32, F32, F16, F32, F32>(bn_problem);
case miopenBFloat16:
return CheckCKApplicability<BF16, F32, F32, F32, BF16, F32, F32>(bn_problem);
case miopenInt32:
case miopenInt8:
case miopenInt8x4:
case miopenBFloat8:
case miopenFloat8:
default: MIOPEN_THROW("BnCKBwdBackward operation does not support this data type");
}
return false;
#endif
}

template <typename XDataType,
typename DxDataType,
typename DyDataType,
typename AccDataType,
typename ScaleDataType,
typename DscaleDbiasDataType,
typename MeanVarDataType>
ConvSolution MakeAnyInvokerFactory(const miopen::batchnorm::ProblemDescription& bn_problem)
static ConvSolution MakeAnyInvokerFactory(const miopen::batchnorm::ProblemDescription& bn_problem)
{
const auto& valid_kernel_ids = FillValidKernelsIDs<DeviceOpBNBwdPtrs<XDataType,
DxDataType,
@@ -218,6 +184,39 @@ ConvSolution MakeAnyInvokerFactory(const miopen::batchnorm::ProblemDescription&
miopen::batchnorm::BwdInvokeParams>(bn_problem, kernel_id);
}

#endif

bool BnCKBwdBackward::IsApplicable(
[[maybe_unused]] const ExecutionContext& context,
[[maybe_unused]] const miopen::batchnorm::ProblemDescription& bn_problem) const
{
#if MIOPEN_BACKEND_HIP && MIOPEN_USE_COMPOSABLEKERNEL
if(miopen::IsDisabled(MIOPEN_DEBUG_CONV_CK_BN_BACK{}))
return false;
if(!bn_problem.IsLayoutNHWC())
return false;
if(!ck_utility::is_ck_supported_hardware(context.GetStream()))
return false;
if(bn_problem.GetXDesc().GetType() != bn_problem.GetScaleBiasDiffDesc().GetType())
return false;

switch(bn_problem.GetXDesc().GetType())
{
case miopenFloat: return CheckCKApplicability<F32, F32, F32, F32, F32, F32, F32>(bn_problem);
case miopenDouble: return CheckCKApplicability<F64, F64, F64, F64, F64, F64, F64>(bn_problem);
case miopenHalf: return CheckCKApplicability<F16, F32, F32, F32, F16, F32, F32>(bn_problem);
case miopenBFloat16:
return CheckCKApplicability<BF16, F32, F32, F32, BF16, F32, F32>(bn_problem);
case miopenInt32:
case miopenInt8:
case miopenInt8x4:
case miopenBFloat8:
case miopenFloat8: break;
}
#endif
return false;
}

ConvSolution BnCKBwdBackward::GetSolution(
[[maybe_unused]] const ExecutionContext& context,
[[maybe_unused]] const miopen::batchnorm::ProblemDescription& bn_problem) const
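Stepping back from the hunks above: in this file (and forward_training_ck.cpp below) the CK-only helper MakeAnyInvokerFactory is made static and kept inside the guard, while IsApplicable moves after the #endif, takes [[maybe_unused]] parameters, and ends in a single unconditional return false. A condensed sketch of the resulting layout, with bodies and template parameter lists abbreviated:

#if MIOPEN_BACKEND_HIP && MIOPEN_USE_COMPOSABLEKERNEL
// CK-only helpers keep internal linkage and never leak outside the guard.
template <typename... DataTypes>
static ConvSolution MakeAnyInvokerFactory(const miopen::batchnorm::ProblemDescription& bn_problem);
#endif

bool BnCKBwdBackward::IsApplicable(
    [[maybe_unused]] const ExecutionContext& context,
    [[maybe_unused]] const miopen::batchnorm::ProblemDescription& bn_problem) const
{
#if MIOPEN_BACKEND_HIP && MIOPEN_USE_COMPOSABLEKERNEL
    // Early-outs (debug env var, NHWC layout, supported hardware) and the
    // per-type CheckCKApplicability dispatch live here.
#endif
    return false; // reached when CK is unavailable or the data type is unsupported
}

The [[maybe_unused]] attributes keep unused-parameter warnings quiet in builds where the guarded body is compiled out.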
32 changes: 12 additions & 20 deletions src/solver/batchnorm/forward_inference_ck.cpp
@@ -175,14 +175,11 @@ static void RunCKSolution(const Handle& handle,
}
#endif

bool BnCKFwdInference::IsApplicable(const ExecutionContext& context,
const miopen::batchnorm::ProblemDescription& bn_problem) const
bool BnCKFwdInference::IsApplicable(
[[maybe_unused]] const ExecutionContext& context,
[[maybe_unused]] const miopen::batchnorm::ProblemDescription& bn_problem) const
{
#if !MIOPEN_BACKEND_HIP || !MIOPEN_USE_COMPOSABLEKERNEL
std::ignore = context;
std::ignore = bn_problem;
return false;
#else
#if MIOPEN_BACKEND_HIP && MIOPEN_USE_COMPOSABLEKERNEL
if(miopen::IsDisabled(MIOPEN_DEBUG_CONV_CK_BN_INFER{}))
return false;
if(!bn_problem.IsLayoutNHWC())
@@ -202,24 +199,17 @@ bool BnCKFwdInference::IsApplicable(const ExecutionContext& context,
case miopenInt8:
case miopenInt8x4: // Support discontinued.
case miopenFloat8:
case miopenBFloat8:
default: MIOPEN_THROW("Unsupported datatype");
case miopenBFloat8: break;
}
return false;
#endif
return false;
}

ConvSolution
BnCKFwdInference::GetSolution(const ExecutionContext& context,
const miopen::batchnorm::ProblemDescription& bn_problem) const
ConvSolution BnCKFwdInference::GetSolution(
[[maybe_unused]] const ExecutionContext& context,
[[maybe_unused]] const miopen::batchnorm::ProblemDescription& bn_problem) const
{
#if !MIOPEN_BACKEND_HIP || !MIOPEN_USE_COMPOSABLEKERNEL
std::ignore = context;
std::ignore = bn_problem;
return {};
#else
std::ignore = context;

#if MIOPEN_BACKEND_HIP && MIOPEN_USE_COMPOSABLEKERNEL
ConvSolution result;
result.invoker_factory = [=](const std::vector<Kernel>& kernels) {
std::ignore = kernels;
@@ -252,6 +242,8 @@ BnCKFwdInference::GetSolution(const ExecutionContext& context,
};
};
return result;
#else
return {};
#endif
}
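A smaller pattern shared by the hunks above and below: the switches over the tensor data type now end the unsupported cases with break instead of a default: MIOPEN_THROW(...) branch, so IsApplicable reports the solver as not applicable rather than throwing. Roughly (a sketch; the dispatch lines for supported types are elided):

switch(bn_problem.GetXDesc().GetType())
{
// supported types return CheckCKApplicability<...>(bn_problem) here
case miopenInt32:
case miopenInt8:
case miopenInt8x4:
case miopenBFloat8:
case miopenFloat8: break; // unsupported: fall out of the switch
}
// With no default label, -Wswitch can still flag any new miopenDataType
// enumerator that is added later but not handled above.
return false;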

61 changes: 30 additions & 31 deletions src/solver/batchnorm/forward_training_ck.cpp
@@ -149,44 +149,14 @@ static bool CheckCKApplicability(const miopen::batchnorm::ProblemDescription& pr
MeanVarDataType>,
CKArgsBNormFwdTraining>(problem);
}
#endif

bool BnCKFwdTraining::IsApplicable(
[[maybe_unused]] const ExecutionContext& context,
[[maybe_unused]] const miopen::batchnorm::ProblemDescription& bn_problem) const
{
#if MIOPEN_BACKEND_HIP || MIOPEN_USE_COMPOSABLEKERNEL
if(miopen::IsDisabled(MIOPEN_DEBUG_CONV_CK_BN_FWD_TRAINING{}))
return false;
if(!bn_problem.IsLayoutNHWC())
return false;
if(!ck_utility::is_ck_whitelist(context.GetStream()))
return false;

switch(bn_problem.GetXDesc().GetType())
{
case miopenHalf: return CheckCKApplicability<F16, F16, F32, F16, F16, F32>(bn_problem);
case miopenFloat: return CheckCKApplicability<F32, F32, F32, F32, F32, F32>(bn_problem);
case miopenDouble: return CheckCKApplicability<F64, F64, F64, F64, F64, F64>(bn_problem);
case miopenBFloat16: return CheckCKApplicability<BF16, BF16, F32, BF16, BF16, F32>(bn_problem);
case miopenInt32:
case miopenInt8:
case miopenInt8x4:
case miopenBFloat8:
case miopenFloat8:
default: MIOPEN_THROW("BnCKFwdTraining operation does not support this data type");
}
return false;
#endif
}

template <typename XDataType,
typename YDataType,
typename AccDataType,
typename ScaleDataType,
typename BiasDataType,
typename MeanVarDataType>
ConvSolution MakeAnyInvokerFactory(const miopen::batchnorm::ProblemDescription& bn_problem)
static ConvSolution MakeAnyInvokerFactory(const miopen::batchnorm::ProblemDescription& bn_problem)
{
const auto& valid_kernel_ids = FillValidKernelsIDs<DeviceOpBNFwdTrainingPtrs<XDataType,
YDataType,
@@ -206,6 +176,35 @@ ConvSolution MakeAnyInvokerFactory(const miopen::batchnorm::ProblemDescription&
CKArgsBNormFwdTraining,
miopen::batchnorm::InvokeParams>(bn_problem, kernel_id);
}
#endif

bool BnCKFwdTraining::IsApplicable(
[[maybe_unused]] const ExecutionContext& context,
[[maybe_unused]] const miopen::batchnorm::ProblemDescription& bn_problem) const
{
#if MIOPEN_BACKEND_HIP && MIOPEN_USE_COMPOSABLEKERNEL
if(miopen::IsDisabled(MIOPEN_DEBUG_CONV_CK_BN_FWD_TRAINING{}))
return false;
if(!bn_problem.IsLayoutNHWC())
return false;
if(!ck_utility::is_ck_supported_hardware(context.GetStream()))
return false;

switch(bn_problem.GetXDesc().GetType())
{
case miopenHalf: return CheckCKApplicability<F16, F16, F32, F16, F16, F32>(bn_problem);
case miopenFloat: return CheckCKApplicability<F32, F32, F32, F32, F32, F32>(bn_problem);
case miopenDouble: return CheckCKApplicability<F64, F64, F64, F64, F64, F64>(bn_problem);
case miopenBFloat16: return CheckCKApplicability<BF16, BF16, F32, BF16, BF16, F32>(bn_problem);
case miopenInt32:
case miopenInt8:
case miopenInt8x4:
case miopenBFloat8:
case miopenFloat8: break;
}
#endif
return false;
}

ConvSolution BnCKFwdTraining::GetSolution(
[[maybe_unused]] const ExecutionContext& context,