
Commit

fix xgboost
Signed-off-by: xadupre <[email protected]>
xadupre committed Dec 22, 2024
1 parent 4be1edf commit 49382a3
Showing 2 changed files with 20 additions and 8 deletions.
13 changes: 10 additions & 3 deletions onnxmltools/utils/tests_helper.py
@@ -1,5 +1,6 @@
 # SPDX-License-Identifier: Apache-2.0
 
+import unittest
 import pickle
 import os
 import numpy
@@ -87,16 +88,22 @@ def dump_data_and_model(
     if not os.path.exists(folder):
         os.makedirs(folder)
 
-    if hasattr(model, "predict"):
+    if "LGBM" in model.__class__.__name__:
         try:
             import lightgbm
         except ImportError:
-            lightgbm = None
+            raise unittest.SkipTest("lightgbm cannot be imported.")
+    else:
+        lightgbm = None
+    if "XGB" in model.__class__.__name__ or "Booster" in model.__class__.__name__:
         try:
             import xgboost
         except ImportError:
-            xgboost = None
+            raise unittest.SkipTest("xgboost cannot be imported.")
+    else:
+        xgboost = None
 
+    if hasattr(model, "predict"):
         if lightgbm is not None and isinstance(model, lightgbm.Booster):
             # LightGBM Booster
             model_dict = model.dump_model()
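Note: raising unittest.SkipTest from this shared helper marks the calling test as skipped when the optional dependency is missing, instead of leaving lightgbm/xgboost set to None and failing later. A minimal sketch of the pattern, assuming a hypothetical require_xgboost helper and test case:

import unittest


def require_xgboost():
    # SkipTest raised outside a TestCase still causes the running test to be skipped.
    try:
        import xgboost  # noqa: F401
    except ImportError:
        raise unittest.SkipTest("xgboost cannot be imported.")


class TestWithOptionalDependency(unittest.TestCase):
    def test_needs_xgboost(self):
        require_xgboost()  # skipped cleanly if xgboost is not installed
        self.assertTrue(True)


if __name__ == "__main__":
    unittest.main()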
15 changes: 10 additions & 5 deletions tests/xgboost/test_xgboost_converters.py
@@ -386,8 +386,13 @@ def test_xgboost_classifier_i5450(self):
         iris = load_iris()
         X, y = iris.data, iris.target
         X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=10)
-        clr = XGBClassifier(objective="multi:softprob", max_depth=1, n_estimators=2)
-        clr.fit(X_train, y_train, eval_set=[(X_test, y_test)], early_stopping_rounds=40)
+        clr = XGBClassifier(
+            objective="multi:softprob",
+            max_depth=1,
+            n_estimators=2,
+            early_stopping_rounds=40,
+        )
+        clr.fit(X_train, y_train, eval_set=[(X_test, y_test)])
         initial_type = [("float_input", FloatTensorType([None, 4]))]
         onx = convert_xgboost(
             clr, initial_types=initial_type, target_opset=TARGET_OPSET
@@ -725,9 +730,10 @@ def test_xgb_classifier_13(self):
             colsample_bytree=0.75,
             random_state=42,
             verbosity=0,
+            early_stopping_rounds=40,
         )
 
-        clr.fit(X_train, y_train, eval_set=[(X_test, y_test)], early_stopping_rounds=40)
+        clr.fit(X_train, y_train, eval_set=[(X_test, y_test)])
 
         initial_type = [("float_input", FloatTensorType([None, 797]))]
         onx = convert_xgboost(
@@ -756,14 +762,13 @@ def test_xgb_classifier_13_2(self):
"early_stopping_rounds": 113,
"random_state": 42,
"max_depth": 3,
"eval_metric": ["logloss", "auc", "error"],
}
eval_metric = ["logloss", "auc", "error"]
model = XGBClassifier(**model_param)
model.fit(
X=x_train,
y=y_train,
eval_set=[(x_test, y_test)],
eval_metric=eval_metric,
verbose=False,
)

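Note: these test changes track the XGBoost scikit-learn wrapper API, where early_stopping_rounds and eval_metric were deprecated as fit() keyword arguments (XGBoost 1.6) and removed in the 2.x wrappers, so they now belong to the estimator constructor. A hedged sketch of the new-style call, with illustrative data and parameter values:

from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from xgboost import XGBClassifier

X, y = load_iris(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=10)

# xgboost >= 2.0: early stopping and evaluation metrics are configured on the
# estimator itself, not passed to fit().
clr = XGBClassifier(
    objective="multi:softprob",
    n_estimators=200,
    max_depth=3,
    early_stopping_rounds=10,
    eval_metric=["mlogloss"],
)
clr.fit(X_train, y_train, eval_set=[(X_test, y_test)])
print("best iteration:", clr.best_iteration)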
