Added large dataset test for import data file and provision to enable adaptive parallelism #1758

Merged: 9 commits, Oct 17, 2024
migtests/scripts/functions.sh (33 changes: 19 additions & 14 deletions)

@@ -451,22 +451,27 @@ import_data_to_source_replica() {
}

import_data_file() {
yb-voyager import data file --export-dir ${EXPORT_DIR} \
--target-db-host ${TARGET_DB_HOST} \
--target-db-port ${TARGET_DB_PORT} \
--target-db-user ${TARGET_DB_USER} \
--target-db-password ${TARGET_DB_PASSWORD:-''} \
--target-db-schema ${TARGET_DB_SCHEMA:-''} \
--target-db-name ${TARGET_DB_NAME} \
--disable-pb true \
--send-diagnostics=false \
--parallel-jobs 3 \
$* || {
cat ${EXPORT_DIR}/metainfo/dataFileDescriptor.json
exit 1
}
args="
--export-dir ${EXPORT_DIR}
--target-db-host ${TARGET_DB_HOST}
--target-db-port ${TARGET_DB_PORT}
--target-db-user ${TARGET_DB_USER}
--target-db-password ${TARGET_DB_PASSWORD:-''}
--target-db-schema ${TARGET_DB_SCHEMA:-''}
--target-db-name ${TARGET_DB_NAME}
--disable-pb true
--send-diagnostics=false
"

# Check if RUN_USING_ADAPTIVE_PARALLELISM is true
if [ "${RUN_USING_ADAPTIVE_PARALLELISM}" = "true" ]; then
Contributor comment: Should we have this for import-data as well, so that we can test both commands as and when required.

args="${args} --enable-adaptive-parallelism true"
fi

yb-voyager import data file ${args} $*
}


archive_changes() {
ENABLE=$(shuf -i 0-1 -n 1)
echo "archive changes ENABLE=${ENABLE}"
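Following the contributor's comment above, the same environment toggle could be reused for the plain import data command. A minimal sketch, assuming a helper named import_data exists in functions.sh and builds its arguments the same way as import_data_file (that helper's actual body is not part of this diff):

    import_data() {
        args="--export-dir ${EXPORT_DIR} --target-db-host ${TARGET_DB_HOST} --target-db-port ${TARGET_DB_PORT} --target-db-name ${TARGET_DB_NAME}"

        # Hypothetical: reuse the same toggle so adaptive parallelism can also be
        # exercised by the import-data tests, mirroring import_data_file above.
        if [ "${RUN_USING_ADAPTIVE_PARALLELISM}" = "true" ]; then
            args="${args} --enable-adaptive-parallelism true"
        fi

        yb-voyager import data ${args} $*
    }
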
migtests/tests/import-file/run-import-file-test (8 changes: 8 additions & 0 deletions)

@@ -239,6 +239,14 @@ main() {
step "Import data file from GCS (csv): t1_quote_escape_char1.csv -> gcs_quote_escape_char1"
import_data_file --data-dir "gs://voyager-automation-data" --format csv --delimiter '|' \
--file-table-map "t1_quote_escape_char1.csv:gcs_quote_escape_char1" --quote-char="'" --escape-char="'"


if [ "${RUN_LARGE_IMPORT_DATA_FILE_TEST}" = true ] ; then

step "Run large sized import data file test"
import_data_file --data-dir "s3://yb-voyager-test-data" --delimiter "\t" --format "text" --file-table-map "accounts_350m_data.sql:accounts_large" --start-clean true --yes

fi

step "Run validations."
"${TEST_DIR}/validate"
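Both new code paths are opt-in through environment variables. A minimal sketch of a manual run, assuming the test script is invoked directly from the repository root (the CI wrapper that normally drives it is not shown in this diff):

    # Opt in to the 350M-row file import and to adaptive parallelism for this run.
    export RUN_LARGE_IMPORT_DATA_FILE_TEST=true
    export RUN_USING_ADAPTIVE_PARALLELISM=true

    # Hypothetical direct invocation; CI may call this through its own test runner.
    migtests/tests/import-file/run-import-file-test

Exporting RUN_LARGE_IMPORT_DATA_FILE_TEST, rather than setting it only inside the shell, also lets the Python validate script read it via os.getenv and raise the expected accounts_large row count to 350000000.
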
migtests/tests/import-file/schema.sql (13 changes: 13 additions & 0 deletions)

@@ -68,6 +68,19 @@ CREATE TABLE public.accounts (
PRIMARY KEY (block, address)
);

CREATE TABLE public.accounts_large (
block bigint NOT NULL,
address text NOT NULL,
dc_balance bigint DEFAULT 0 NOT NULL,
dc_nonce bigint DEFAULT 0 NOT NULL,
security_balance bigint DEFAULT 0 NOT NULL,
security_nonce bigint DEFAULT 0 NOT NULL,
balance bigint DEFAULT 0 NOT NULL,
nonce bigint DEFAULT 0 NOT NULL,
staked_balance bigint,
PRIMARY KEY (block, address)
);

create table t1_quote_char (i int, j timestamp, k bigint, l varchar(30));

create table t1_quote_escape_char1 (i int, j timestamp, k bigint, l varchar(30));
migtests/tests/import-file/validate (6 changes: 6 additions & 0 deletions)

@@ -1,6 +1,7 @@
#!/usr/bin/env python3

import yb
import os

def main():
yb.run_checks(file_import_done_checks)
@@ -51,7 +52,12 @@ EXPECTED = {
"gcs_csv_with_header": 3,
"gcs_volume": 1000000,
"gcs_quote_escape_char1": 3,
"accounts_large": 0
}

if os.getenv("RUN_LARGE_IMPORT_DATA_FILE_TEST") == "true":
EXPECTED["accounts_large"] = 350000000

EXPECTED_NON_PUBLIC = {
"smsa": 60,
}