[Feature][E2E][Kerberos] Support for Kerberos in e2e #8108

Merged · 12 commits · Nov 25, 2024
23 changes: 23 additions & 0 deletions docs/en/connector-v2/sink/Hive.md
@@ -182,6 +182,29 @@ sink {
}
```

### Example 2: Kerberos

```bash
sink {
  Hive {
    table_name = "default.test_hive_sink_on_hdfs_with_kerberos"
    metastore_uri = "thrift://metastore:9083"
    hive_site_path = "/tmp/hive-site.xml"
    kerberos_principal = "hive/[email protected]"
    kerberos_keytab_path = "/tmp/hive.keytab"
    krb5_path = "/tmp/krb5.conf"
  }
}
```

Description:

- `hive_site_path`: The path to the `hive-site.xml` file.
- `kerberos_principal`: The principal for Kerberos authentication.
- `kerberos_keytab_path`: The keytab file path for Kerberos authentication.
- `krb5_path`: The path to the `krb5.conf` file used for Kerberos authentication.
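
To make the relationship between these options and the actual Kerberos handshake concrete, here is a minimal, illustrative Java sketch of the client-side login such values drive, using Hadoop's `UserGroupInformation` with the example paths and principal above. It is a sketch only, not the connector's internal implementation.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class HiveSinkKerberosLoginSketch {
    public static void main(String[] args) throws Exception {
        // krb5_path: tell the JVM which krb5.conf to use.
        System.setProperty("java.security.krb5.conf", "/tmp/krb5.conf");

        // Switch the Hadoop client security mode to Kerberos.
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(conf);

        // kerberos_principal + kerberos_keytab_path: obtain a ticket from the keytab.
        UserGroupInformation.loginUserFromKeytab(
                "hive/[email protected]", "/tmp/hive.keytab");

        System.out.println("Logged in as: " + UserGroupInformation.getLoginUser());
    }
}
```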


## Hive on s3

### Step 1
24 changes: 24 additions & 0 deletions docs/en/connector-v2/source/Hive.md
@@ -138,6 +138,30 @@ Source plugin common parameters, please refer to [Source Common Options](../sour

```

### Example 3: Kerberos

```bash
source {
  Hive {
    table_name = "default.test_hive_sink_on_hdfs_with_kerberos"
    metastore_uri = "thrift://metastore:9083"
    hive.hadoop.conf-path = "/tmp/hadoop"
    result_table_name = hive_source
    hive_site_path = "/tmp/hive-site.xml"
    kerberos_principal = "hive/[email protected]"
    kerberos_keytab_path = "/tmp/hive.keytab"
    krb5_path = "/tmp/krb5.conf"
  }
}
```

Description:

- `hive_site_path`: The path to the `hive-site.xml` file.
- `kerberos_principal`: The principal for Kerberos authentication.
- `kerberos_keytab_path`: The keytab file path for Kerberos authentication.
- `krb5_path`: The path to the `krb5.conf` file used for Kerberos authentication.
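
As an illustration of how a client can pick up these settings once a Kerberos login has been performed (for example via `UserGroupInformation`, as sketched in the sink documentation), the following minimal Java sketch builds a `HiveMetaStoreClient` against the example `metastore_uri` and `hive_site_path` above. It is illustrative only, not the connector's internal code.

```java
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;

public class HiveSourceKerberosMetastoreSketch {
    public static HiveMetaStoreClient openMetastoreClient() throws MetaException {
        HiveConf conf = new HiveConf();
        // metastore_uri from the example above.
        conf.set("hive.metastore.uris", "thrift://metastore:9083");
        // hive_site_path: merge the Kerberos-enabled hive-site.xml into the client config.
        conf.addResource(new Path("/tmp/hive-site.xml"));
        return new HiveMetaStoreClient(conf);
    }
}
```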

## Hive on s3

### Step 1
@@ -17,9 +17,13 @@

package org.apache.seatunnel.e2e.connector.hive;

import org.apache.seatunnel.e2e.common.util.ContainerUtil;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.security.UserGroupInformation;

import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.output.Slf4jLogConsumer;
@@ -28,6 +32,7 @@
import org.testcontainers.utility.DockerImageName;
import org.testcontainers.utility.DockerLoggerFactory;

import java.io.IOException;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.SQLException;
@@ -70,24 +75,58 @@ public String getMetastoreUri() {
return String.format("thrift://%s:%s", getHost(), getMappedPort(HMS_PORT));
}

    public String getHiveJdbcUri() {
        return String.format(
                "jdbc:hive2://%s:%s/default", getHost(), getMappedPort(HIVE_SERVER_PORT));
    public String getHiveJdbcUri(boolean enableKerberos) {
        if (enableKerberos) {
            return String.format(
                    "jdbc:hive2://%s:%s/default;principal=hive/[email protected]",
                    getHost(), getMappedPort(HIVE_SERVER_PORT));
        } else {
            return String.format(
                    "jdbc:hive2://%s:%s/default", getHost(), getMappedPort(HIVE_SERVER_PORT));
        }
    }

    public HiveMetaStoreClient createMetaStoreClient() throws MetaException {
        return this.createMetaStoreClient(false);
    }

    public HiveMetaStoreClient createMetaStoreClient(boolean enableKerberos) throws MetaException {
        HiveConf conf = new HiveConf();
        conf.set("hive.metastore.uris", getMetastoreUri());

        if (enableKerberos) {
            conf.addResource("kerberos/hive-site.xml");
        }
        return new HiveMetaStoreClient(conf);
    }

    public Connection getConnection()
            throws ClassNotFoundException, InstantiationException, IllegalAccessException,
                    SQLException {
        Driver driver = loadHiveJdbcDriver();
        return getConnection(false);
    }

        return driver.connect(getHiveJdbcUri(), getJdbcConnectionConfig());
    public Connection getConnection(boolean enableKerberos)
            throws ClassNotFoundException, InstantiationException, IllegalAccessException,
                    SQLException {
        Driver driver = loadHiveJdbcDriver();
        if (!enableKerberos) {
            return driver.connect(getHiveJdbcUri(false), getJdbcConnectionConfig());
        }
        Configuration authConf = new Configuration();
        authConf.set("hadoop.security.authentication", "kerberos");
        Configuration configuration = new Configuration();
        System.setProperty(
                "java.security.krb5.conf",
                ContainerUtil.getResourcesFile("/kerberos/krb5_local.conf").getPath());
        configuration.set("hadoop.security.authentication", "KERBEROS");
        try {
            UserGroupInformation.setConfiguration(configuration);
            UserGroupInformation.loginUserFromKeytab(
                    "hive/[email protected]", "/tmp/hive.keytab");
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        return driver.connect(getHiveJdbcUri(true), getJdbcConnectionConfig());
    }

    public Driver loadHiveJdbcDriver()
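
For context, a hypothetical usage sketch of the new Kerberos-aware overloads added above; it assumes the container class shown here is named `HiveContainer` (the class name is not visible in this excerpt) and is not part of the PR itself.

```java
import java.sql.Connection;
import java.sql.Statement;

import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;

public class HiveContainerKerberosUsageSketch {
    // 'HiveContainer' is assumed to be the container class shown above.
    static void verifyKerberizedAccess(HiveContainer container) throws Exception {
        // HiveServer2 access over JDBC, with the Kerberos principal embedded in the URL.
        try (Connection connection = container.getConnection(true);
                Statement statement = connection.createStatement()) {
            statement.execute("SHOW TABLES");
        }

        // Metastore access using the Kerberos-enabled hive-site.xml resource.
        HiveMetaStoreClient client = container.createMetaStoreClient(true);
        client.getAllDatabases();
        client.close();
    }
}
```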
@@ -180,6 +180,7 @@ public void startUp() throws Exception {
                .await()
                .atMost(360, TimeUnit.SECONDS)
                .pollDelay(Duration.ofSeconds(10L))
                .pollInterval(Duration.ofSeconds(3L))
                .untilAsserted(this::initializeConnection);
        prepareTable();
    }