
Commit

Merge remote-tracking branch 'origin/main' into cy-ts
ShaileshParmar11 committed Mar 6, 2024
2 parents 3e64a29 + 9491e04 commit cc42fbe
Showing 38 changed files with 395 additions and 253 deletions.
58 changes: 58 additions & 0 deletions .github/actions/setup-openmetadata-test-environment/action.yml
@@ -0,0 +1,58 @@
name: Setup OpenMetadata Test Environment
description: Steps needed to have a coherent test environment

inputs:
python-version:
description: Python Version to install
required: true

runs:
using: composite
steps:
# ---- Install Ubuntu Dependencies ---------------------------------------------
- name: Install Ubuntu dependencies
run: |
sudo apt-get update && sudo apt-get install -y unixodbc-dev python3-venv librdkafka-dev gcc libsasl2-dev build-essential libssl-dev libffi-dev \
unixodbc-dev libevent-dev python3-dev libkrb5-dev
shell: bash
# ------------------------------------------------------------------------------

# ---- Setup Java --------------------------------------------------------------
- name: Setup JDK 17
uses: actions/setup-java@v3
with:
java-version: '17'
distribution: 'temurin'
# ------------------------------------------------------------------------------

# ---- Setup Python Test Environment -------------------------------------------
- name: Setup Python ${{ inputs.python-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ inputs.python-version }}

- name: Generate Models
run: |
python3 -m venv env
source env/bin/activate
sudo make install_antlr_cli
make install_dev generate
shell: bash

- name: Install Python Dependencies
run: |
source env/bin/activate
make install_all install_test
shell: bash
# ------------------------------------------------------------------------------

# ---- Start OpenMetadata Server and ingest Sample Data ------------------------
- name: Start Server and Ingest Sample Data
uses: nick-fields/[email protected]
env:
INGESTION_DEPENDENCY: "mysql,elasticsearch"
with:
timeout_minutes: 60
max_attempts: 2
retry_on: error
command: ./docker/run_local_docker.sh -m no-ui
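For reference, a workflow consumes this composite action exactly as the updated workflows below do; a minimal sketch of a calling job's steps (the python-version value is illustrative):

    steps:
      - name: Checkout
        uses: actions/checkout@v3

      # One step now covers Ubuntu packages, JDK 17, Python, generated models,
      # dependencies, and a local OpenMetadata server with sample data
      - name: Setup Openmetadata Test Environment
        uses: ./.github/actions/setup-openmetadata-test-environment
        with:
          python-version: 3.9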
63 changes: 23 additions & 40 deletions .github/workflows/py-cli-e2e-tests.yml
@@ -14,6 +14,10 @@ on:
schedule:
- cron: '0 0 * * *'
workflow_dispatch:
inputs:
e2e-tests:
description: "E2E Tests to run"
type: string

permissions:
id-token: write
@@ -25,23 +29,23 @@ jobs:
strategy:
fail-fast: false
matrix:
e2e-test: ['bigquery', 'dbt_redshift', 'metabase', 'mssql', 'mysql', 'redash', 'snowflake', 'tableau', 'powerbi', 'vertica', 'python', 'redshift', 'quicksight', 'datalake_s3', 'postgres', 'oracle', 'athena', 'bigquery_multiple_project']
e2e-test: ${{ fromJSON(inputs.e2e-tests || '["bigquery", "dbt_redshift", "metabase", "mssql", "mysql", "redash", "snowflake", "tableau", "powerbi", "vertica", "python", "redshift", "quicksight", "datalake_s3", "postgres", "oracle", "athena", "bigquery_multiple_project"]') }}
environment: test

steps:
- name: Checkout
uses: actions/checkout@v3

- name: Set up JDK 17
uses: actions/setup-java@v3
- name: Free Disk Space (Ubuntu)
uses: jlumbroso/free-disk-space@main
with:
java-version: '17'
distribution: 'temurin'
tool-cache: false
android: true
dotnet: true
haskell: true
large-packages: false
swap-storage: true
docker-images: false

- name: Set up Python 3.9
uses: actions/setup-python@v4
with:
python-version: 3.9
- name: Checkout
uses: actions/checkout@v3

- name: configure aws credentials
if: contains('quicksight', matrix.e2e-test) || contains('datalake_s3', matrix.e2e-test) || contains('athena', matrix.e2e-test)
@@ -51,32 +55,11 @@ jobs:
role-session-name: github-ci-aws-e2e-tests
aws-region: ${{ secrets.E2E_AWS_REGION }}

- name: Install Ubuntu dependencies
run: |
sudo apt-get update && sudo apt-get install -y unixodbc-dev python3-venv librdkafka-dev gcc libsasl2-dev build-essential libssl-dev libffi-dev \
unixodbc-dev libevent-dev python3-dev libkrb5-dev
- name: Generate models
run: |
python3 -m venv env
source env/bin/activate
sudo make install_antlr_cli
make install_dev generate
- name: Install open-metadata dependencies
run: |
source env/bin/activate
make install_all install_test
- name: Start Server and Ingest Sample Data
uses: nick-fields/[email protected]
env:
INGESTION_DEPENDENCY: "mysql,elasticsearch"
- name: Setup Openmetadata Test Environment
uses: ./.github/actions/setup-openmetadata-test-environment
with:
timeout_minutes: 60
max_attempts: 2
retry_on: error
command: ./docker/run_local_docker.sh -m no-ui
python-version: 3.9


- name: Run Python Tests & record coverage
if: matrix.e2e-test == 'python'
@@ -160,7 +143,7 @@ jobs:
echo "except ImportError:" >> $SITE_CUSTOMIZE_PATH
echo " pass" >> $SITE_CUSTOMIZE_PATH
coverage run --rcfile ingestion/pyproject.toml -a --branch -m pytest -c ingestion/pyproject.toml --junitxml=ingestion/junit/test-results-$E2E_TEST.xml --ignore=ingestion/tests/unit/source ingestion/tests/cli_e2e/test_cli_$E2E_TEST.py
coverage combine --data-file=.coverage.$E2E_TEST --rcfile=ingestion/pyproject.toml --keep -a .coverage*
coverage combine --data-file=.coverage.$E2E_TEST --rcfile=ingestion/pyproject.toml --keep -a .coverage*
coverage report --rcfile ingestion/pyproject.toml --data-file .coverage.$E2E_TEST || true
- name: Upload coverage artifact for Python tests
@@ -239,11 +222,11 @@ jobs:

- name: Generate report
run: |
for folder in artifacts/coverage-*; do
for folder in artifacts/coverage-*; do
cp -rT $folder/ . ;
done
mkdir ingestion/junit
for folder in artifacts/tests-*; do
for folder in artifacts/tests-*; do
cp -rT $folder/ ingestion/junit ;
done
source env/bin/activate
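The new e2e-tests input lets a manual run narrow the matrix; since it is parsed with fromJSON, the value must be a JSON array string. A hypothetical trigger with the GitHub CLI:

    # Run only a subset of the E2E matrix; omit -f to fall back to the full default list
    gh workflow run py-cli-e2e-tests.yml -f e2e-tests='["mysql", "snowflake"]'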
54 changes: 11 additions & 43 deletions .github/workflows/py-tests.yml
@@ -42,13 +42,14 @@ jobs:
- name: Free Disk Space (Ubuntu)
uses: jlumbroso/free-disk-space@main
with:
tool-cache: false
android: true
dotnet: true
haskell: true
large-packages: false
swap-storage: true
docker-images: false
tool-cache: false
android: true
dotnet: true
haskell: true
large-packages: false
swap-storage: true
docker-images: false

- name: Wait for the labeler
uses: lewagon/[email protected]
if: ${{ github.event_name == 'pull_request_target' }}
@@ -73,43 +74,10 @@ jobs:
ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: 0

- name: Set up JDK 17
uses: actions/setup-java@v3
with:
java-version: '17'
distribution: 'temurin'

- name: Set up Python ${{ matrix.py-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.py-version }}

- name: Install Ubuntu dependencies
run: |
sudo apt-get update && sudo apt-get install -y unixodbc-dev python3-venv librdkafka-dev gcc libsasl2-dev build-essential libssl-dev libffi-dev \
unixodbc-dev libevent-dev python3-dev libkrb5-dev
- name: Generate models
run: |
python3 -m venv env
source env/bin/activate
sudo make install_antlr_cli
make install_dev generate
- name: Install open-metadata dependencies
run: |
source env/bin/activate
make install_all install_test
- name: Start Server and Ingest Sample Data
uses: nick-fields/[email protected]
env:
INGESTION_DEPENDENCY: "mysql,elasticsearch"
- name: Setup Openmetadata Test Environment
uses: ./.github/actions/setup-openmetadata-test-environment
with:
timeout_minutes: 60
max_attempts: 2
retry_on: error
command: ./docker/run_local_docker.sh -m no-ui
python-version: ${{ matrix.py-version}}

- name: Run Python Tests
if: ${{ matrix.py-version != '3.9' }}
@@ -0,0 +1,13 @@
-- Update the relation between testDefinition and testCase to 0 (CONTAINS)
UPDATE entity_relationship
SET relation = 0
WHERE fromEntity = 'testDefinition' AND toEntity = 'testCase' AND relation != 0;

-- Update the test definition provider
-- If the test definition has OpenMetadata as a test platform, then the provider is system, else it is user
UPDATE test_definition
SET json = CASE
WHEN JSON_CONTAINS(json, '"OpenMetadata"', '$.testPlatforms') THEN JSON_INSERT(json,'$.provider','system')
ELSE JSON_INSERT(json,'$.provider','user')
END
;
@@ -0,0 +1,13 @@
-- Update the relation between testDefinition and testCase to 0 (CONTAINS)
UPDATE entity_relationship
SET relation = 0
WHERE fromEntity = 'testDefinition' AND toEntity = 'testCase' AND relation != 0;

-- Update the test definition provider
-- If the test definition has OpenMetadata as a test platform, then the provider is system, else it is user
UPDATE test_definition
SET json =
case
when json->'testPlatforms' @> '"OpenMetadata"' then jsonb_set(json,'{provider}','"system"',true)
else jsonb_set(json,'{provider}','"user"', true)
end;
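As a quick sanity check after either migration (illustrative only, not part of the shipped scripts), every test definition should now carry a provider of 'system' or 'user'; for the PostgreSQL variant:

    -- Illustrative check: expect only 'system' and 'user' providers
    SELECT json ->> 'provider' AS provider, COUNT(*)
    FROM test_definition
    GROUP BY provider;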
@@ -1,13 +0,0 @@
-- Update the relation between testDefinition and testCase to 0 (CONTAINS)
UPDATE entity_relationship
SET relation = 0
WHERE fromEntity = 'testDefinition' AND toEntity = 'testCase' AND relation != 0;

-- Update the test definition provider
-- If the test definition has OpenMetadata as a test platform, then the provider is system, else it is user
UPDATE test_definition
SET json = CASE
WHEN JSON_CONTAINS(json, '"OpenMetadata"', '$.testPlatforms') THEN JSON_INSERT(json,'$.provider','system')
ELSE JSON_INSERT(json,'$.provider','user')
END
;
@@ -1,13 +0,0 @@
-- Update the relation between testDefinition and testCase to 0 (CONTAINS)
UPDATE entity_relationship
SET relation = 0
WHERE fromEntity = 'testDefinition' AND toEntity = 'testCase' AND relation != 0;

-- Update the test definition provider
-- If the test definition has OpenMetadata as a test platform, then the provider is system, else it is user
UPDATE test_definition
SET json =
case
when json->'testPlatforms' @> '"OpenMetadata"' then jsonb_set(json,'{provider}','"system"',true)
else jsonb_set(json,'{provider}','"user"', true)
end;
1 change: 0 additions & 1 deletion ingestion/setup.py
@@ -190,7 +190,6 @@
"druid": {"pydruid>=0.6.5"},
"dynamodb": {VERSIONS["boto3"]},
"elasticsearch": {
"elasticsearch==7.13.1",
VERSIONS["elasticsearch8"],
}, # also requires requests-aws4auth which is in base
"glue": {VERSIONS["boto3"]},
@@ -233,7 +233,7 @@ def get_table_partition_details(
)
]
)
return True, partition_details
return True, partition_details
return False, None

def process_additional_table_constraints(
6 changes: 6 additions & 0 deletions ingestion/tests/cli_e2e/common/test_cli_db.py
@@ -126,6 +126,12 @@ def assert_for_table_with_profiler_time_partition(
if expected_column_profile:
column_profile = column.profile.dict()
for key in expected_column_profile: # type: ignore
if key == "nonParametricSkew":
self.assertTrue(
column_profile[key].__round__(10)
== expected_column_profile[key].__round__(10)
)
continue
self.assertTrue(
column_profile[key] == expected_column_profile[key]
)
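The new branch rounds both sides of the nonParametricSkew comparison to absorb floating-point noise; a similar tolerance check could also be written with math.isclose (a sketch, not the committed code):

    import math

    # Treat skew values as equal when they agree to within ~1e-10
    assert math.isclose(
        column_profile[key], expected_column_profile[key], abs_tol=1e-10
    )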
1 change: 1 addition & 0 deletions ingestion/tests/cli_e2e/database/snowflake/snowflake.yaml
@@ -11,6 +11,7 @@ source:
type: Snowflake
connectionOptions: {}
connectionArguments: {}
clientSessionKeepAlive: True
sourceConfig:
config:
markDeletedTables: true
2 changes: 1 addition & 1 deletion ingestion/tests/cli_e2e/test_cli_metabase.py
@@ -64,7 +64,7 @@ def expected_dashboards_and_charts(self) -> int:
return 6

def expected_lineage(self) -> int:
return 7
return 8

def expected_tags(self) -> int:
return 0
4 changes: 2 additions & 2 deletions ingestion/tests/cli_e2e/test_cli_mysql.py
@@ -117,12 +117,12 @@ def expected_filtered_schema_excludes() -> int:

@staticmethod
def expected_filtered_table_includes() -> int:
return 67
return 78

@staticmethod
def expected_filtered_table_excludes() -> int:
return 4

@staticmethod
def expected_filtered_mix() -> int:
return 67
return 78