Skip to content

Commit

Permalink
fix CI after fixing the conflict
Browse files Browse the repository at this point in the history
  • Loading branch information
dailai committed Apr 17, 2024
1 parent 2b0a92a commit 6358b5b
Show file tree
Hide file tree
Showing 3 changed files with 57 additions and 6 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,9 @@ public class HiveOnS3Conf extends S3Conf {
// The emr of amazon on s3 use this EmrFileSystem as the file system
protected static final String HDFS_S3_IMPL = "com.amazon.ws.emr.hadoop.fs.EmrFileSystem";

protected HiveOnS3Conf(String hdfsNameKey) {
protected HiveOnS3Conf(String hdfsNameKey, String schema) {
super(hdfsNameKey);
setSchema(schema);
}

@Override
Expand All @@ -44,9 +45,13 @@ protected String switchHdfsImpl() {
public static HadoopConf buildWithReadOnlyConfig(ReadonlyConfig readonlyConfig) {
S3Conf s3Conf = (S3Conf) S3Conf.buildWithReadOnlyConfig(readonlyConfig);
String bucketName = readonlyConfig.get(S3ConfigOptions.S3_BUCKET);
if (!bucketName.startsWith(S3A_SCHEMA) && !bucketName.startsWith(DEFAULT_SCHEMA)) {
if (bucketName.startsWith(DEFAULT_SCHEMA)) {
s3Conf.setSchema(DEFAULT_SCHEMA);
} else if (bucketName.startsWith(S3A_SCHEMA)) {
s3Conf.setSchema(S3A_SCHEMA);
} else {
s3Conf.setSchema(S3_SCHEMA);
}
return new HiveOnS3Conf(s3Conf.getHdfsNameKey());
return new HiveOnS3Conf(s3Conf.getHdfsNameKey(), s3Conf.getSchema());
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,11 @@
package org.apache.seatunnel.connectors.seatunnel.hive.storage;

import org.apache.seatunnel.shade.com.typesafe.config.Config;
import org.apache.seatunnel.shade.com.typesafe.config.ConfigValueFactory;

import org.apache.seatunnel.api.configuration.ReadonlyConfig;
import org.apache.seatunnel.connectors.seatunnel.file.config.HadoopConf;
import org.apache.seatunnel.connectors.seatunnel.file.s3.config.S3ConfigOptions;
import org.apache.seatunnel.connectors.seatunnel.hive.config.HiveOnS3Conf;

import org.apache.commons.lang3.StringUtils;
Expand All @@ -34,6 +36,17 @@ public class S3Storage extends AbstractStorage {
public HadoopConf buildHadoopConfWithReadOnlyConfig(ReadonlyConfig readonlyConfig) {
Configuration configuration = loadHiveBaseHadoopConfig(readonlyConfig);
Config config = fillBucket(readonlyConfig, configuration);
config =
config.withValue(
S3ConfigOptions.S3A_AWS_CREDENTIALS_PROVIDER.key(),
ConfigValueFactory.fromAnyRef(
configuration.get(
S3ConfigOptions.S3A_AWS_CREDENTIALS_PROVIDER.key())));
config =
config.withValue(
S3ConfigOptions.FS_S3A_ENDPOINT.key(),
ConfigValueFactory.fromAnyRef(
configuration.get(S3ConfigOptions.FS_S3A_ENDPOINT.key())));
HadoopConf hadoopConf =
HiveOnS3Conf.buildWithReadOnlyConfig(ReadonlyConfig.fromConfig(config));
Map<String, String> propsWithPrefix = configuration.getPropsWithPrefix(StringUtils.EMPTY);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,8 @@

import org.apache.seatunnel.api.configuration.ReadonlyConfig;
import org.apache.seatunnel.connectors.seatunnel.file.config.HadoopConf;
import org.apache.seatunnel.connectors.seatunnel.file.s3.config.S3Conf;
import org.apache.seatunnel.connectors.seatunnel.file.s3.config.S3ConfigOptions;
import org.apache.seatunnel.connectors.seatunnel.hive.config.HiveOnS3Conf;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
Expand All @@ -39,7 +40,33 @@ public class S3StorageTest {
"hive.hadoop.conf",
new HashMap<String, String>() {
{
put("bucket", "s3a://my_bucket");
put(S3ConfigOptions.S3_BUCKET.key(), "s3a://my_bucket");
put(
S3ConfigOptions.S3A_AWS_CREDENTIALS_PROVIDER
.key(),
"provider");
put(
S3ConfigOptions.FS_S3A_ENDPOINT.key(),
"http://s3.ap-northeast-1.amazonaws.com");
}
});
}
});

private static final ReadonlyConfig S3 =
ReadonlyConfig.fromMap(
new HashMap<String, Object>() {
{
put(
"hive.hadoop.conf",
new HashMap<String, String>() {
{
put(S3ConfigOptions.S3_BUCKET.key(), "s3://my_bucket");
put(
S3ConfigOptions.S3A_AWS_CREDENTIALS_PROVIDER
.key(),
"testProvider");
put(S3ConfigOptions.FS_S3A_ENDPOINT.key(), "test");
}
});
}
Expand All @@ -50,6 +77,12 @@ void fillBucketInHadoopConf() {
S3Storage s3Storage = new S3Storage();
HadoopConf s3aConf = s3Storage.buildHadoopConfWithReadOnlyConfig(S3A);
assertHadoopConfForS3a(s3aConf);

HadoopConf s3Conf = s3Storage.buildHadoopConfWithReadOnlyConfig(S3);
Assertions.assertTrue(s3Conf instanceof HiveOnS3Conf);
Assertions.assertEquals(s3Conf.getSchema(), "s3");
Assertions.assertEquals(
s3Conf.getFsHdfsImpl(), "com.amazon.ws.emr.hadoop.fs.EmrFileSystem");
}

@Test
Expand All @@ -66,7 +99,7 @@ void fillBucketInHadoopConfPath() throws URISyntaxException {
}

private void assertHadoopConfForS3a(HadoopConf s3aConf) {
Assertions.assertTrue(s3aConf instanceof S3Conf);
Assertions.assertTrue(s3aConf instanceof HiveOnS3Conf);
Assertions.assertEquals(s3aConf.getSchema(), "s3a");
Assertions.assertEquals(s3aConf.getFsHdfsImpl(), "org.apache.hadoop.fs.s3a.S3AFileSystem");
}
Expand Down

0 comments on commit 6358b5b

Please sign in to comment.