Commit

[feature] [seatunnel-example-2.3.7] Add test files
LeonYoah committed Oct 23, 2024
1 parent 40b2f3e commit 9723f01
Showing 4 changed files with 161 additions and 0 deletions.
@@ -0,0 +1,47 @@
env {
  job.mode = "batch"
  parallelism = "1"
  job.retry.times = "0"
  job.name = "aace8bb9f8864562b0264ea75e3991f5"
  checkpoint.interval = "30000"
}

source {
  Kafka {
    json_field = {
      #"key1" = "$.Slice[*].Type"
      "key2" = "$.Slice[*].Req.MotorVehicleListObject.MotorVehicleObject[*].Direction"
      "key3" = "$.Slice[*].Req.MotorVehicleListObject.MotorVehicleObject[*].DisappearTime"
      "key4" = "$.Slice[*].Req.MotorVehicleListObject.MotorVehicleObject[*].HasPlate"
      #"key5" = "$.Slice[*].Req.MotorVehicleListObject.MotorVehicleObject[*].SubImageList"
    }
    schema = {
      fields = {
        #key1 = "string"
        key2 = "string"
        key3 = "string"
        key4 = "string"
        #key5 = "string"
      }
    }
    format = "JSON"
    bootstrap.servers = "10.28.23.152:6667"
    format_error_handle_way = "skip"
    topic = "test-big"
    handler_field_name = "Req"
    consumer.group = "11112"
    #semantics = EXACTLY_ONCE
    start_mode = "group_offsets"
    result_table_name = "tmp_table_kafka_31ad8"
  }
}

sink {
  Hive {
    source_table_name = "tmp_table_kafka_31ad8"
    table_name = "test.test3"
    metastore_uri = "thrift://master:9083"
  }
}
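
For reference, a hypothetical Kafka message that the json_field JSONPath mappings above would match could look like the following; all field values are invented for illustration, only the nesting under Slice[*].Req.MotorVehicleListObject.MotorVehicleObject[*] matters:

{
  "Slice": [
    {
      "Type": "MotorVehicle",
      "Req": {
        "MotorVehicleListObject": {
          "MotorVehicleObject": [
            { "Direction": "north", "DisappearTime": "20241023120000", "HasPlate": "true" }
          ]
        }
      }
    }
  ]
}
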
@@ -0,0 +1,42 @@
env {
  job.mode = "STREAMING"
  parallelism = "1"
  job.retry.times = "0"
  checkpoint.interval = "30000"
}

source {
  Kafka {
    topic = "testConn"
    bootstrap.servers = "10.28.12.3:9092"
    consumer.group = "test_kafka_error_lyb"
    commit_on_checkpoint = "false"
    # kafka.config {
    #   auto.offset.reset = "earliest"
    # }
    schema = {
      fields = {
        id = "int"
        name = "string"
        age = "int"
      }
    }
    format = "json"
    result_table_name = "t1_zjh_json"
    semantics = "EXACTLY_ONCE"
    start_mode = "earliest"
    json_field = {
      id = "$.id"
      name = "$.name"
      age = "$.age"
    }
    # handler_field_name = "Req"
  }
}

sink {
  Hive {
    source_table_name = "t1_zjh_json"
    table_name = "test_zjh.t1_zjh"
    metastore_uri = "thrift://10.28.23.152:9083"
    tmp_path = "/tmp"
  }
}
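
A hypothetical message on the testConn topic that this schema and json_field mapping would parse (values are illustrative only):

{ "id": 1, "name": "test", "age": 18 }
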
@@ -0,0 +1,42 @@
env {
  job.mode = "BATCH"
  parallelism = "1"
  job.retry.times = "0"
  job.name = "1837085933850710017"
}

source {
  S3File {
    fs.s3a.endpoint = "http://10.28.23.110:9010"
    fs.s3a.aws.credentials.provider = "org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider"
    access_key = "admin"
    secret_key = "12345678"
    bucket = "s3a://xugurtp"
    path = "/tmp/example_data.json"
    delimiter = ""
    file_format_type = "json"
    result_table_name = "tmp_table_file_40674"
    xml_use_attr_format = ""
    xml_row_tag = ""
    sheet_name = ""
    encoding = "UTF-8"
    schema = {"fields":{"a":"string","s":"string","d":"string","f":"string","g":"timestamp","j":"string","k":"string","l":"string"}}
    json_field = {
      a = "$.rows[*].workspaceId"
      s = "$.rows[*].workspaceName"
      d = "$.rows[*].description"
      f = "$.rows[*].createBy"
      g = "$.rows[*].createTime"
      j = "$.rows[*].status"
      k = "$.rows[*].tenantId"
      l = "$.rows[*].tenantName"
    }
  }
}

transform {
}

sink {
  Console {}
}
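
A hypothetical snippet of /tmp/example_data.json that the rows[*] JSONPath expressions above would flatten into one row per array element (every field value here is made up for illustration):

{
  "rows": [
    {
      "workspaceId": "ws-001",
      "workspaceName": "demo",
      "description": "test workspace",
      "createBy": "admin",
      "createTime": "2024-09-10 15:10:21",
      "status": "ENABLED",
      "tenantId": "t-001",
      "tenantName": "default"
    }
  ]
}
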
@@ -0,0 +1,30 @@
env {
  execution.parallelism = 1
  job.mode = "BATCH"
}

source {
  Http {
    result_table_name = "token"
    json_field = {"data":"$.data"}
    schema = {"fields":{"data":"string"}}
    url = "http://10.28.23.152:18080/demo/token"
    method = "POST"
    format = "json"
  }
}

sink {
  S3File {
    path = "/xugurtp/seatunnel/tmp/1833394734322094082/20240910151021"
    bucket = "s3a://xugurtp"
    fs.s3a.endpoint = "http://10.28.23.110:9010"
    fs.s3a.aws.credentials.provider = "org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider"
    access_key = "1OkI53dJYgOJODLfUoQg"
    secret_key = "V4ROTtoJTaLb0UI9VfgA6ZJM2FaNZBsIaBJyrtNW"
    custom_filename = true
    file_name_expression = "output_params"
    file_format_type = "json"
    is_enable_transaction = false
    sink_columns = ["data"]
  }
}
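
A hypothetical response body from the /demo/token endpoint; the json_field mapping keeps only $.data as a single string column, which the S3File sink then writes out as JSON. The response shape and token value below are assumptions, not taken from the actual service:

{ "code": 200, "data": "eyJhbGciOiJIUzI1NiJ9.sample.token" }
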
