From fb2094efd3575395bc6b394cec6b337716000c61 Mon Sep 17 00:00:00 2001
From: Payal Chaurasiya
Date: Fri, 14 Feb 2025 17:40:03 +0530
Subject: [PATCH] Streamlining PR pipeline (#1378)

* WIP: Streamlining PR pipeline

Signed-off-by: Chaurasiya, Payal

* Fix pipeline job

Signed-off-by: Chaurasiya, Payal

* Fix pipeline job

Signed-off-by: Chaurasiya, Payal

* trivy step

Signed-off-by: Chaurasiya, Payal

* trivy step

Signed-off-by: Chaurasiya, Payal

* fix eden compression

Signed-off-by: Chaurasiya, Payal

* experiment

Signed-off-by: Chaurasiya, Payal

* Revert dockerize test

Signed-off-by: Chaurasiya, Payal

* Review comments fix

Signed-off-by: Chaurasiya, Payal

* Revert trivy changes

Signed-off-by: Chaurasiya, Payal

---------

Signed-off-by: Chaurasiya, Payal
---
 .github/workflows/double_ws_export.yml         | 36 -------
 .github/workflows/pr_pipeline.yml              | 20 +---
 .github/workflows/task_runner_basic_e2e.yml    | 74 ++++++++++++++-
 .../workflows/task_runner_e2e_resiliency.yml   | 12 +--
 .github/workflows/taskrunner_eden_pipeline.yml | 34 -------
 .github/workflows/ubuntu.yml                   | 56 -----------
 .github/workflows/windows.yml                  | 44 ++++++++-
 .trivyignore                                   |  1 +
 tests/end_to_end/utils/constants.py            |  9 +-
 tests/github/test_double_ws_export.py          | 94 -------------------
 10 files changed, 131 insertions(+), 249 deletions(-)
 delete mode 100644 .github/workflows/double_ws_export.yml
 delete mode 100644 .github/workflows/taskrunner_eden_pipeline.yml
 delete mode 100644 .github/workflows/ubuntu.yml
 delete mode 100644 tests/github/test_double_ws_export.py

diff --git a/.github/workflows/double_ws_export.yml b/.github/workflows/double_ws_export.yml
deleted file mode 100644
index 9e0b07a03a..0000000000
--- a/.github/workflows/double_ws_export.yml
+++ /dev/null
@@ -1,36 +0,0 @@
-# This workflow will install Python dependencies, run tests and lint with a single version of Python
-# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
-
-name: Double workspace export
-
-on:
-  workflow_call:
-  workflow_dispatch:
-
-permissions:
-  contents: read
-
-env:
-  # A workaround for long FQDN names provided by GitHub actions.
-  FQDN: "localhost"
-
-jobs:
-  build:
-    if: github.event.pull_request.draft == false
-    runs-on: 'ubuntu-latest'
-    timeout-minutes: 15
-
-    steps:
-    - uses: actions/checkout@v4
-    - name: Set up Python 3
-      uses: actions/setup-python@v5
-      with:
-        python-version: "3.10"
-    - name: Install dependencies
-      run: |
-        python -m pip install --upgrade pip
-        pip install .
-    - name: Double workspace export test
-      run: |
-        python -m tests.github.test_double_ws_export
-
diff --git a/.github/workflows/pr_pipeline.yml b/.github/workflows/pr_pipeline.yml
index 1b2fa9155a..0e77720d31 100644
--- a/.github/workflows/pr_pipeline.yml
+++ b/.github/workflows/pr_pipeline.yml
@@ -27,10 +27,6 @@ jobs:
     name: Docker Bench for Security
     uses: ./.github/workflows/docker-bench-security.yml
 
-  double_workspace_export:
-    name: Double workspace export
-    uses: ./.github/workflows/double_ws_export.yml
-
   fr_301_watermark_nb_run:
     name: Federated Runtime 301 MNIST Watermarking
     uses: ./.github/workflows/federated_runtime.yml
@@ -47,14 +43,10 @@ jobs:
     name: Pytest and code coverage
     uses: ./.github/workflows/pytest_coverage.yml
 
-  straggler_handling:
-    name: Straggler Handling Test
-    uses: ./.github/workflows/straggler-handling.yml
-
-  task_runner:
-    name: TaskRunner
-    uses: ./.github/workflows/taskrunner.yml
-
+  windows:
+    name: Windows pytest coverage and workspace runs
+    uses: ./.github/workflows/windows.yml
+
   task_runner_e2e:
     name: TaskRunner E2E
     uses: ./.github/workflows/task_runner_basic_e2e.yml
@@ -63,10 +55,6 @@ jobs:
     name: TaskRunner E2E Resiliency
     uses: ./.github/workflows/task_runner_e2e_resiliency.yml
 
-  taskrunner_eden_compression:
-    name: TaskRunner (Eden Compression)
-    uses: ./.github/workflows/taskrunner_eden_pipeline.yml
-
   tr_docker_gramine_direct:
     name: TaskRunner (docker/gramine-direct)
     uses: ./.github/workflows/tr_docker_gramine_direct.yml
diff --git a/.github/workflows/task_runner_basic_e2e.yml b/.github/workflows/task_runner_basic_e2e.yml
index 558c24d884..5c8a9e1ec5 100644
--- a/.github/workflows/task_runner_basic_e2e.yml
+++ b/.github/workflows/task_runner_basic_e2e.yml
@@ -72,7 +72,7 @@ jobs:
     if: |
       (github.event_name == 'schedule' && github.repository_owner == 'securefederatedai') ||
       (github.event_name == 'workflow_dispatch') ||
-      (github.event.pull_request.draft == false && contains(github.event.pull_request.labels.*.name, 'task_runner_e2e'))
+      (github.event.pull_request.draft == false)
     name: Input value selection
     runs-on: ubuntu-22.04
     outputs:
@@ -326,3 +326,75 @@ jobs:
         if: ${{ always() }}
         with:
           test_type: "With_Memory_Logs"
+
+  test_straggler_check:
+    name: Straggler Handling
+    runs-on: ubuntu-22.04
+    timeout-minutes: 30
+    if: |
+      (github.event_name == 'schedule' && github.repository_owner == 'securefederatedai') ||
+      (github.event_name == 'workflow_dispatch') ||
+      (github.event.pull_request.draft == false)
+
+    env:
+      MODEL_NAME: "torch/mnist_straggler_check"
+      PYTHON_VERSION: "3.10"
+
+    steps:
+      - name: Checkout OpenFL repository
+        id: checkout_openfl
+        uses: actions/checkout@v4
+
+      - name: Pre test run
+        uses: ./.github/actions/tr_pre_test_run
+        if: ${{ always() }}
+
+      - name: Run Straggler Handling Interface Test
+        id: run_tests
+        run: |
+          python -m pytest -s tests/end_to_end/test_suites/task_runner_tests.py \
+          -m task_runner_basic --model_name ${{ env.MODEL_NAME }} \
+          --num_rounds ${{ env.NUM_ROUNDS }} --num_collaborators ${{ env.NUM_COLLABORATORS }}
+          echo "Straggler handling test run completed"
+
+      - name: Post test run
+        uses: ./.github/actions/tr_post_test_run
+        if: ${{ always() }}
+        with:
+          test_type: "Straggler_Handling"
+
+  test_eden_compression:
+    name: Eden Compression
+    runs-on: ubuntu-22.04
+    timeout-minutes: 30
+    if: |
+      (github.event_name == 'schedule' && github.repository_owner == 'securefederatedai') ||
+      (github.event_name == 'workflow_dispatch') ||
+      (github.event.pull_request.draft == false && contains(github.event.pull_request.labels.*.name, 'eden_compression'))
+
+    env:
+      MODEL_NAME: "torch/mnist_eden_compression"
+      PYTHON_VERSION: "3.10"
+
+    steps:
+      - name: Checkout OpenFL repository
+        id: checkout_openfl
+        uses: actions/checkout@v4
+
+      - name: Pre test run
+        uses: ./.github/actions/tr_pre_test_run
+        if: ${{ always() }}
+
+      - name: Run Eden Compression Test
+        id: run_tests
+        run: |
+          python -m pytest -s tests/end_to_end/test_suites/task_runner_tests.py \
+          -m task_runner_basic --model_name ${{ env.MODEL_NAME }} \
+          --num_rounds ${{ env.NUM_ROUNDS }} --num_collaborators ${{ env.NUM_COLLABORATORS }}
+          echo "Eden compression test run completed"
+
+      - name: Post test run
+        uses: ./.github/actions/tr_post_test_run
+        if: ${{ always() }}
+        with:
+          test_type: "Eden_Compression"
\ No newline at end of file
diff --git a/.github/workflows/task_runner_e2e_resiliency.yml b/.github/workflows/task_runner_e2e_resiliency.yml
index 1e145380c6..8d7f28fbbc 100644
--- a/.github/workflows/task_runner_e2e_resiliency.yml
+++ b/.github/workflows/task_runner_e2e_resiliency.yml
@@ -15,7 +15,7 @@ on:
       num_rounds:
         description: "Number of rounds to train"
         required: false
-        default: "50"
+        default: "30"
         type: string
       num_collaborators:
         description: "Number of collaborators"
@@ -25,12 +25,12 @@ on:
       model_name:
         description: "Model name"
         required: false
-        default: "all"
+        default: "torch/mnist"
         type: choice
         options:
-          - all
           - torch/mnist
           - keras/mnist
+          - all
       python_version:
         description: "Python version"
         required: false
@@ -47,9 +47,9 @@ permissions:
 # Environment variables common for all the jobs
 # DO NOT use double quotes for the values of the environment variables
 env:
-  NUM_ROUNDS: ${{ inputs.num_rounds || 50 }}
+  NUM_ROUNDS: ${{ inputs.num_rounds || 30 }}
   NUM_COLLABORATORS: ${{ inputs.num_collaborators || 2 }}
-  MODEL_NAME: ${{ inputs.model_name || 'all' }}
+  MODEL_NAME: ${{ inputs.model_name || 'torch/mnist' }}
   PYTHON_VERSION: ${{ inputs.python_version || '3.10' }}
 
 jobs:
@@ -57,7 +57,7 @@ jobs:
     if: |
       (github.event_name == 'schedule' && github.repository_owner == 'securefederatedai') ||
       (github.event_name == 'workflow_dispatch') ||
-      (github.event.pull_request.draft == false && contains(github.event.pull_request.labels.*.name, 'task_runner_e2e'))
+      (github.event.pull_request.draft == false)
     name: Input value selection
     runs-on: ubuntu-22.04
     outputs:
diff --git a/.github/workflows/taskrunner_eden_pipeline.yml b/.github/workflows/taskrunner_eden_pipeline.yml
deleted file mode 100644
index fc8381a5b9..0000000000
--- a/.github/workflows/taskrunner_eden_pipeline.yml
+++ /dev/null
@@ -1,34 +0,0 @@
-# This workflow will install Python dependencies, run tests and lint with a single version of Python
-# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
-
-name: TaskRunner (Eden Compression)
-
-on:
-  workflow_call:
-  workflow_dispatch:
-
-permissions:
-  contents: read
-
-jobs:
-  build:
-    if: (github.event.pull_request.draft == false && contains(github.event.pull_request.labels.*.name, 'eden_compression'))
-    strategy:
-      matrix:
-        os: ['ubuntu-latest', 'windows-latest']
-    runs-on: ${{ matrix.os }}
-    timeout-minutes: 15
-
-    steps:
-    - uses: actions/checkout@v4
-    - name: Set up Python 3
-      uses: actions/setup-python@v5
-      with:
-        python-version: "3.10"
-    - name: Install dependencies
-      run: |
-        python -m pip install --upgrade pip
-        pip install .
-    - name: Test TaskRunner API with Eden Compression
-      run: |
-        python -m tests.github.test_hello_federation --template torch/mnist_eden_compression --fed_workspace aggregator --col1 col1 --col2 col2 --rounds-to-train 3
diff --git a/.github/workflows/ubuntu.yml b/.github/workflows/ubuntu.yml
deleted file mode 100644
index 7da5724bde..0000000000
--- a/.github/workflows/ubuntu.yml
+++ /dev/null
@@ -1,56 +0,0 @@
-name: Ubuntu (latest)
-
-on:
-  schedule:
-    - cron: '0 0 * * *'
-
-permissions:
-  contents: read
-
-env:
-  # A workaround for long FQDN names provided by GitHub actions.
-  FQDN: "localhost"
-
-jobs:
-  pytest-coverage: # from pytest_coverage.yml
-    strategy:
-      matrix:
-        python-version: ["3.10", "3.11", "3.12"]
-    runs-on: ubuntu-latest
-    timeout-minutes: 15
-
-    steps:
-    - uses: actions/checkout@v4
-    - name: Set up Python 3
-      uses: actions/setup-python@v5
-      with:
-        python-version: ${{ matrix.python-version }}
-    - name: Install dependencies
-      run: |
-        python -m pip install --upgrade pip
-        pip install pytest coverage
-        pip install -r test-requirements.txt
-        pip install .
-    - name: Test with pytest and report code coverage
-      run: |
-        coverage run -m pytest -rA
-        coverage report
-
-  cli:
-    needs: [pytest-coverage]
-    runs-on: ubuntu-latest
-    timeout-minutes: 15
-
-    steps:
-    - uses: actions/checkout@v4
-    - name: Set up Python 3
-      uses: actions/setup-python@v5
-      with:
-        python-version: "3.10"
-    - name: Install dependencies
-      run: |
-        python -m pip install --upgrade pip
-        pip install .
-    - name: Test TaskRunner API
-      run: |
-        python -m tests.github.test_hello_federation --template keras/mnist --fed_workspace aggregator --col1 col1 --col2 col2 --rounds-to-train 3 --save-model output_model
diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml
index 5228edb2ac..42d94de140 100644
--- a/.github/workflows/windows.yml
+++ b/.github/workflows/windows.yml
@@ -1,6 +1,7 @@
 name: Windows (latest)
 
 on:
+  workflow_call:
   schedule:
     - cron: '0 0 * * *'
 
@@ -36,8 +37,7 @@ jobs:
         coverage run -m pytest -rA
         coverage report
 
-  cli: # from taskrunner.yml
-    needs: [pytest-coverage]
+  keras_mnist: # from taskrunner.yml - keras/mnist
     runs-on: windows-latest
     timeout-minutes: 15
     steps:
@@ -52,4 +52,42 @@ jobs:
         pip install .
     - name: Test TaskRunner API
       run: |
-        python -m tests.github.test_hello_federation --template keras/mnist --fed_workspace aggregator --col1 col1 --col2 col2 --rounds-to-train 3 --save-model output_model
\ No newline at end of file
+        python -m tests.github.test_hello_federation --template keras/mnist --fed_workspace aggregator --col1 col1 --col2 col2 --rounds-to-train 3 --save-model output_model
+
+  torch_mnist_eden_compression: # from taskrunner_eden_pipeline.yml - torch/mnist_eden_compression
+    runs-on: windows-latest
+    timeout-minutes: 15
+    if: |
+      (github.event_name == 'schedule' && github.repository_owner == 'securefederatedai') ||
+      (github.event_name == 'workflow_dispatch') ||
+      (github.event.pull_request.draft == false && contains(github.event.pull_request.labels.*.name, 'eden_compression'))
+    steps:
+    - uses: actions/checkout@v4
+    - name: Set up Python 3
+      uses: actions/setup-python@v5
+      with:
+        python-version: "3.10"
+    - name: Install dependencies
+      run: |
+        python -m pip install --upgrade pip
+        pip install .
+    - name: Test TaskRunner API with Eden Compression
+      run: |
+        python -m tests.github.test_hello_federation --template torch/mnist_eden_compression --fed_workspace aggregator --col1 col1 --col2 col2 --rounds-to-train 3
+
+  torch_mnist_straggler_check: # from straggler-handling.yml - torch/mnist_straggler_check
+    runs-on: windows-latest
+    timeout-minutes: 15
+    steps:
+    - uses: actions/checkout@v4
+    - name: Set up Python 3
+      uses: actions/setup-python@v5
+      with:
+        python-version: "3.10"
+    - name: Install dependencies
+      run: |
+        python -m pip install --upgrade pip
+        pip install .
+    - name: Test Straggler Handling Interface
+      run: |
+        python -m tests.github.test_hello_federation --template torch/mnist_straggler_check --fed_workspace aggregator --col1 col1 --col2 col2 --rounds-to-train 3
\ No newline at end of file
diff --git a/.trivyignore b/.trivyignore
index 5bf578a27d..153df1c234 100644
--- a/.trivyignore
+++ b/.trivyignore
@@ -1,2 +1,3 @@
 # Accept the risk
 CVE-2025-0395
+CVE-2024-3596
diff --git a/tests/end_to_end/utils/constants.py b/tests/end_to_end/utils/constants.py
index c3149a242c..0f5515a1eb 100644
--- a/tests/end_to_end/utils/constants.py
+++ b/tests/end_to_end/utils/constants.py
@@ -10,12 +10,15 @@ class ModelName(Enum):
     """
     # IMP - The model name must be same (and in uppercase) as the model value.
     # This is used to identify the model in the tests.
-    TORCH_MNIST = "torch/mnist"
+    KERAS_JAX_MNIST = "keras/jax/mnist"
     KERAS_MNIST = "keras/mnist"
+    KERAS_TORCH_MNIST = "keras/torch/mnist"
     TORCH_HISTOLOGY = "torch/histology"
+    TORCH_MNIST = "torch/mnist"
+    TORCH_MNIST_EDEN_COMPRESSION = "torch/mnist_eden_compression"
+    TORCH_MNIST_STRAGGLER_CHECK = "torch/mnist_straggler_check"
     XGB_HIGGS = "xgb_higgs"
-    KERAS_TORCH_MNIST = "keras/torch/mnist"
-    KERAS_JAX_MNIST = "keras/jax/mnist"
+
 
 NUM_COLLABORATORS = 2
 NUM_ROUNDS = 5
diff --git a/tests/github/test_double_ws_export.py b/tests/github/test_double_ws_export.py
deleted file mode 100644
index 07d2714ce8..0000000000
--- a/tests/github/test_double_ws_export.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# Copyright (C) 2020-2023 Intel Corporation
-# SPDX-License-Identifier: Apache-2.0
-
-import os
-import time
-import argparse
-from pathlib import Path
-import shutil
-from subprocess import check_call
-from concurrent.futures import ProcessPoolExecutor
-import psutil
-
-from tests.github.utils import create_certified_workspace, certify_aggregator, create_collaborator, is_path_name_allowed
-from openfl.utilities.utils import getfqdn_env
-
-
-def main():
-    # Test the pipeline
-    parser = argparse.ArgumentParser()
-    workspace_choice = []
-    with os.scandir('openfl-workspace') as iterator:
-        for entry in iterator:
-            if entry.name not in ['__init__.py', 'workspace', 'default']:
-                workspace_choice.append(entry.name)
-    parser.add_argument('--template', default='keras/mnist', choices=workspace_choice)
-    parser.add_argument('--fed_workspace', default='fed_work12345alpha81671')
-    parser.add_argument('--col1', default='one123dragons')
-    parser.add_argument('--col2', default='beta34unicorns')
-    parser.add_argument('--rounds-to-train')
-    parser.add_argument('--col1-data-path', default='1')
-    parser.add_argument('--col2-data-path', default='2')
-
-    args = parser.parse_args()
-    fed_workspace = args.fed_workspace
-
-    # Check if the path name is allowed before creating the workspace
-    if not is_path_name_allowed(fed_workspace):
-        print(f"The path name {fed_workspace} is not allowed")
-        return
-
-    archive_name = f'{fed_workspace}.zip'
-    fqdn = getfqdn_env()
-    template = args.template
-    rounds_to_train = args.rounds_to_train
-    col1 = args.col1
-    col1_data_path = args.col1_data_path
-
-    # START
-    # =====
-    # Make sure you are in a Python virtual environment with the FL package installed.
-    create_certified_workspace(fed_workspace, template, fqdn, rounds_to_train)
-
-    certify_aggregator(fqdn)
-
-    workspace_root = Path().resolve()  # Get the absolute directory path for the workspace
-
-    # Create collaborator #1
-    create_collaborator(col1, workspace_root, col1_data_path, archive_name, fed_workspace)
-
-    # Run the federation
-    with ProcessPoolExecutor(max_workers=3) as executor:
-        executor.submit(check_call, ['fx', 'aggregator', 'start'], cwd=workspace_root)
-        time.sleep(5)
-
-        dir1 = workspace_root / col1 / fed_workspace
-        executor.submit(check_call, ['fx', 'collaborator', 'start', '-n', col1], cwd=dir1)
-    shutil.rmtree(dir1)
-    for proc in psutil.process_iter():
-        if 'fx' in proc.name():
-            proc.kill()
-    # Initialize FL plan
-    check_call(['fx', 'plan', 'initialize', '-a', fqdn])
-    # Create certificate authority for workspace
-    check_call(['fx', 'workspace', 'certify'])
-
-    # Export FL workspace
-    check_call(['fx', 'workspace', 'export'])
-
-    certify_aggregator(fqdn)
-
-    # Create collaborator #1
-    create_collaborator(col1, workspace_root, col1_data_path, archive_name, fed_workspace)
-    # Run the federation
-    with ProcessPoolExecutor(max_workers=3) as executor:
-        executor.submit(check_call, ['fx', 'aggregator', 'start'], cwd=workspace_root)
-        time.sleep(5)
-
-        dir1 = workspace_root / col1 / fed_workspace
-        executor.submit(check_call, ['fx', 'collaborator', 'start', '-n', col1], cwd=dir1)
-    shutil.rmtree(workspace_root)
-
-
-if __name__ == '__main__':
-    main()