Remove tests that moved to golem (#1049)
* Del tests that fully moved to GOLEM

* WIP transfer tests

* Fixes after rebase

* Remove some tests moved to golem

* Remove some tests moved to golem

* Remove some tests moved to golem

* Remove some tests moved to golem

* Fixes after rebase

* Remove PrimaryNode and SecondaryNode

* Fix test_complex_search_space_tuning_correct
YamLyubov authored Feb 21, 2023
1 parent 9ed994c commit 2a4ce8e
Showing 56 changed files with 147 additions and 3,345 deletions.
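The headline API change in this commit is the replacement of the old PrimaryNode/SecondaryNode pair with the single PipelineNode class, as the test diffs below show. A minimal before/after sketch, with illustrative operation names:

# Before this commit: two node classes, split by role.
# from fedot.core.pipelines.node import PrimaryNode, SecondaryNode
# first = PrimaryNode('scaling')                      # node without parents
# final = SecondaryNode('ridge', nodes_from=[first])  # node with parents

# After this commit: one class covers both roles; `nodes_from` marks dependent nodes.
from fedot.core.pipelines.node import PipelineNode
from fedot.core.pipelines.pipeline import Pipeline

first = PipelineNode('scaling')                       # ex-PrimaryNode role
final = PipelineNode('ridge', nodes_from=[first])     # ex-SecondaryNode role
pipeline = Pipeline(final)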
114 changes: 0 additions & 114 deletions examples/advanced/fedot_based_solutions/graph_model_optimization.py

This file was deleted.

@@ -17,8 +17,9 @@
path = os.path.join(os.path.expanduser("~"), 'memory_profiler')
full_path_train, full_path_test = get_scoring_data()
arguments = {'train_file_path': full_path_train, 'test_file_path': full_path_test}
print(path)
MemoryProfiler(run_credit_scoring_problem, kwargs=arguments, path=path,
-               roots=[run_credit_scoring_problem], max_depth=8)
+               roots=[run_credit_scoring_problem], max_depth=8, visualization=True)

# EXAMPLE of TimeProfiler.

10 changes: 0 additions & 10 deletions examples/data/custom_encoded.csv

This file was deleted.

3 changes: 1 addition & 2 deletions test/integration/automl/test_automl.py
@@ -5,8 +5,7 @@
h2o_regression_pipeline_evaluation, h2o_ts_pipeline_evaluation
from examples.advanced.automl.pipeline_from_automl import run_pipeline_from_automl
from examples.advanced.automl.tpot_vs_fedot import run_tpot_vs_fedot_example
-from fedot.core.repository.operation_types_repository import OperationTypesRepository, get_operations_for_task
-from fedot.core.repository.tasks import TaskTypesEnum, Task
+from fedot.core.repository.operation_types_repository import OperationTypesRepository
from fedot.core.utils import fedot_project_root


10 changes: 5 additions & 5 deletions test/integration/preprocessing/test_pipeline_preprocessing.py
@@ -4,7 +4,7 @@
import pytest

from fedot.core.data.data import InputData
-from fedot.core.pipelines.node import PrimaryNode
+from fedot.core.pipelines.node import PipelineNode
from fedot.core.pipelines.pipeline import Pipeline
from fedot.preprocessing.dummy_preprocessing import DummyPreprocessor
from fedot.preprocessing.preprocessing import DataPreprocessor
@@ -16,7 +16,7 @@ def test_pipeline_has_dummy_preprocessor_with_disabled_preprocessing():
    """
    Tests pipeline with disabled input data preprocessing has dummy preprocessor
    """
-    pipeline = Pipeline(PrimaryNode('ridge'), use_input_preprocessing=False)
+    pipeline = Pipeline(PipelineNode('ridge'), use_input_preprocessing=False)
    assert type(pipeline.preprocessor) is DummyPreprocessor

@@ -42,7 +42,7 @@ def test_disabled_pipeline_data_preprocessing(case: Tuple[Callable[[], InputData
    data_getter, is_fit_stage = case
    input_data = data_getter()

-    pipeline = Pipeline(PrimaryNode('ridge'), use_input_preprocessing=False)
+    pipeline = Pipeline(PipelineNode('ridge'), use_input_preprocessing=False)
    if is_fit_stage:
        preprocessed_data = pipeline._preprocess(input_data, is_fit_stage=is_fit_stage)
    else:
@@ -94,11 +94,11 @@ def test_data_preprocessor_performs_obligatory_data_preprocessing_only_once():
def test_data_preprocessor_performs_optional_data_preprocessing_only_once():
    input_data = data_with_only_categorical_features()
    preprocessor = DataPreprocessor()
-    pipeline = Pipeline(PrimaryNode('ridge'))
+    pipeline = Pipeline(PipelineNode('ridge'))

    preprocessed_data = preprocessor.optional_prepare_for_fit(pipeline, input_data)

-    other_pipeline = Pipeline(PrimaryNode('dt'))
+    other_pipeline = Pipeline(PipelineNode('dt'))
    preprocessed_data_same = preprocessor.optional_prepare_for_fit(other_pipeline, preprocessed_data)

    assert id(preprocessed_data) == id(preprocessed_data_same)
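For context, the flag these tests exercise belongs to the public Pipeline constructor; a minimal usage sketch assembled from the assertions above (nothing here goes beyond what the diff itself shows):

from fedot.core.pipelines.node import PipelineNode
from fedot.core.pipelines.pipeline import Pipeline
from fedot.preprocessing.dummy_preprocessing import DummyPreprocessor

# With use_input_preprocessing=False the pipeline gets a no-op DummyPreprocessor
# instead of the default DataPreprocessor, as the first test above asserts.
pipeline = Pipeline(PipelineNode('ridge'), use_input_preprocessing=False)
assert type(pipeline.preprocessor) is DummyPreprocessor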
29 changes: 0 additions & 29 deletions test/unit/adapter/mock_adapter.py

This file was deleted.

79 changes: 79 additions & 0 deletions test/unit/adapter/test_adapt_pipeline.py
@@ -1,7 +1,10 @@
from copy import deepcopy
from random import choice

import numpy as np
import pytest

from golem.core.dag.graph_node import GraphNode
from golem.core.dag.graph_verifier import GraphVerifier
from golem.core.dag.verification_rules import DEFAULT_DAG_RULES
from golem.core.optimisers.graph import OptNode
@@ -10,6 +13,8 @@
from fedot.core.pipelines.node import PipelineNode
from fedot.core.pipelines.pipeline import Pipeline
from fedot.core.pipelines.pipeline_builder import PipelineBuilder
+from test.unit.dag.test_graph_utils import find_first
+from test.unit.tasks.test_regression import get_synthetic_regression_data


def get_pipelines():
@@ -41,6 +46,80 @@ def get_pipelines():
skip_connection_structure]


def generate_so_complex_pipeline():
    node_imp = PipelineNode('simple_imputation')
    node_lagged = PipelineNode('lagged', nodes_from=[node_imp])
    node_ridge = PipelineNode('ridge', nodes_from=[node_lagged])
    node_decompose = PipelineNode('decompose', nodes_from=[node_lagged, node_ridge])
    node_pca = PipelineNode('pca', nodes_from=[node_decompose])
    node_final = PipelineNode('ridge', nodes_from=[node_ridge, node_pca])
    pipeline = Pipeline(node_final)
    return pipeline


def pipeline_with_custom_parameters(alpha_value):
    node_scaling = PipelineNode('scaling')
    node_norm = PipelineNode('normalization')
    node_dtreg = PipelineNode('dtreg', nodes_from=[node_scaling])
    node_lasso = PipelineNode('lasso', nodes_from=[node_norm])
    node_final = PipelineNode('ridge', nodes_from=[node_dtreg, node_lasso])
    node_final.parameters = {'alpha': alpha_value}
    pipeline = Pipeline(node_final)

    return pipeline


def test_pipeline_adapters_params_correct():
    """ Checking the correct conversion of hyperparameters in nodes when nodes
    are passing through adapter
    """
    init_alpha = 12.1
    pipeline = pipeline_with_custom_parameters(init_alpha)

    # Convert into OptGraph object
    adapter = PipelineAdapter()
    opt_graph = adapter.adapt(pipeline)
    # Get Pipeline object back
    restored_pipeline = adapter.restore(opt_graph)
    # Get hyperparameter value after pipeline restoration
    restored_alpha = restored_pipeline.root_node.parameters['alpha']
    assert np.isclose(init_alpha, restored_alpha)


def test_preds_before_and_after_convert_equal():
    """ Check if the pipeline predictions change before and after conversion
    through the adapter
    """
    init_alpha = 12.1
    pipeline = pipeline_with_custom_parameters(init_alpha)

    # Generate data
    input_data = get_synthetic_regression_data(n_samples=10, n_features=2,
                                               random_state=2021)
    # Init fit
    pipeline.fit(input_data)
    init_preds = pipeline.predict(input_data)

    # Convert into OptGraph object
    adapter = PipelineAdapter()
    opt_graph = adapter.adapt(pipeline)
    restored_pipeline = adapter.restore(opt_graph)

    # Restored pipeline fit
    restored_pipeline.fit(input_data)
    restored_preds = restored_pipeline.predict(input_data)

    assert np.array_equal(init_preds.predict, restored_preds.predict)


def test_no_opt_or_graph_nodes_after_adapt_so_complex_graph():
    adapter = PipelineAdapter()
    pipeline = generate_so_complex_pipeline()
    adapter.adapt(pipeline)

    assert not find_first(pipeline, lambda n: type(n) in (GraphNode, OptNode))


@pytest.mark.parametrize('pipeline', get_pipelines())
def test_pipelines_adapt_properly(pipeline):
    adapter = PipelineAdapter()
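The new tests all revolve around the Pipeline-to-OptGraph round trip. A condensed sketch of that flow; the PipelineAdapter import is elided from the hunk above, so the module path used here is an assumption:

import numpy as np

from fedot.core.pipelines.adapters import PipelineAdapter  # module path assumed
from fedot.core.pipelines.node import PipelineNode
from fedot.core.pipelines.pipeline import Pipeline

# One-node pipeline with an explicit hyperparameter, as in
# pipeline_with_custom_parameters above.
node = PipelineNode('ridge')
node.parameters = {'alpha': 12.1}
pipeline = Pipeline(node)

# adapt() yields the optimiser-side OptGraph; restore() maps it back to a Pipeline.
adapter = PipelineAdapter()
opt_graph = adapter.adapt(pipeline)
restored = adapter.restore(opt_graph)

# Hyperparameters should survive the round trip, mirroring
# test_pipeline_adapters_params_correct.
assert np.isclose(restored.root_node.parameters['alpha'], 12.1)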
