From 56e7a56f761a9e6ed99cf398f51dba3e834a07e7 Mon Sep 17 00:00:00 2001 From: "ankesh.maheshwari" Date: Tue, 5 Feb 2019 17:58:03 +0530 Subject: [PATCH 1/4] changing artifactory ip --- pom.xml | 8 ++++---- storage-api/pom.xml | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/pom.xml b/pom.xml index 6a0b7a8abad6..01f5a906fda3 100644 --- a/pom.xml +++ b/pom.xml @@ -210,12 +210,12 @@ fk-art-snapshot libs-snapshot - http://10.85.59.116/artifactory/v1.0/artifacts/libs-snapshots-local + http://artifactory.fkinternal.com/artifactory/v1.0/artifacts/libs-snapshots-local fk-art-release libs-rel - http://10.85.59.116/artifactory/v1.0/artifacts/libs-release-local + http://artifactory.fkinternal.com/artifactory/v1.0/artifacts/libs-release-local @@ -224,12 +224,12 @@ fk-art-snapshot Flipkart-Artifactory - http://10.85.59.116/artifactory/v1.0/artifacts/libs-snapshots-local + http://artifactory.fkinternal.com/artifactory/v1.0/artifacts/libs-snapshots-local fk-art-release Flipkart-Artifactory - http://10.85.59.116/artifactory/v1.0/artifacts/libs-release-local + http://artifactory.fkinternal.com/artifactory/v1.0/artifacts/libs-release-local datanucleus diff --git a/storage-api/pom.xml b/storage-api/pom.xml index edbb84aaa65f..b678fe369d91 100644 --- a/storage-api/pom.xml +++ b/storage-api/pom.xml @@ -41,12 +41,12 @@ fk-art-snapshot libs-snapshot - http://10.85.59.116/artifactory/v1.0/artifacts/libs-snapshots-local + http://artifactory.fkinternal.com/artifactory/v1.0/artifacts/libs-snapshots-local fk-art-release libs-rel - http://10.85.59.116/artifactory/v1.0/artifacts/libs-release-local + http://artifactory.fkinternal.com/artifactory/v1.0/artifacts/libs-release-local From 2fe765d2ecc37e89fdc723d647f62c34610aec0e Mon Sep 17 00:00:00 2001 From: "ankesh.maheshwari" Date: Mon, 18 Mar 2019 17:02:47 +0530 Subject: [PATCH 2/4] adding struct type columns in input --- .../org/apache/hadoop/hive/ql/Driver.java | 39 +++++++++++++++++++ 1 file changed, 39 insertions(+) diff 
--git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java index 6910e507c6fb..0b5468734502 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -25,6 +25,7 @@ import java.io.Serializable; import java.net.InetAddress; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; @@ -120,7 +121,13 @@ import org.apache.hadoop.hive.ql.session.OperationLog.LoggingLevel; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.ByteStream; +import org.apache.hadoop.hive.serde2.SerDeUtils; +import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.hive.shims.Utils; import org.apache.hadoop.mapred.ClusterStatus; import org.apache.hadoop.mapred.JobClient; @@ -756,11 +763,17 @@ public static void doAuthorization(HiveOperation op, BaseSemanticAnalyzer sem, S if (ss.isAuthorizationModeV2()) { // get mapping of tables to columns used ColumnAccessInfo colAccessInfo = sem.getColumnAccessInfo(); + + // add Struct type columns + addStructColumns(sem, colAccessInfo); + // colAccessInfo is set only in case of SemanticAnalyzer Map> selectTab2Cols = colAccessInfo != null ? colAccessInfo .getTableToColumnAccessMap() : null; + LOG.info("selectTab2Cols field Names : " + selectTab2Cols); Map> updateTab2Cols = sem.getUpdateColumnAccessInfo() != null ? 
sem.getUpdateColumnAccessInfo().getTableToColumnAccessMap() : null; + LOG.info("updateTab2Cols field Names : " + updateTab2Cols); doAuthorizationV2(ss, op, inputs, outputs, command, selectTab2Cols, updateTab2Cols); return; } @@ -902,6 +915,32 @@ public static void doAuthorization(HiveOperation op, BaseSemanticAnalyzer sem, S } } + private static void addStructColumns(BaseSemanticAnalyzer sem, ColumnAccessInfo colAccessInfo) { + TableDesc tableDesc = sem.getFetchTask().getTblDesc(); + String columnTypeProperty = tableDesc.getProperties().getProperty(serdeConstants.LIST_COLUMN_TYPES); + String columnNameProperty = tableDesc.getProperties().getProperty(serdeConstants.LIST_COLUMNS); + final String columnNameDelimiter = tableDesc.getProperties().containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tableDesc.getProperties() + .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA); + List columnNames; + if (columnNameProperty.length() == 0) { + columnNames = new ArrayList<>(); + } else { + columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter)); + } + List columnTypes; + if (columnTypeProperty.length() == 0) { + columnTypes = new ArrayList<>(); + } else { + columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty); + } + StructTypeInfo rowTypeInfo = (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes); + LOG.info("Following are the struct field Names : " + rowTypeInfo.getAllStructFieldNames()); + for(String column : rowTypeInfo.getAllStructFieldNames()) { + LOG.info("Adding Column : " + column); + colAccessInfo.add(tableDesc.getTableName(), column); + } + } + private static void getTablePartitionUsedColumns(HiveOperation op, BaseSemanticAnalyzer sem, Map> tab2Cols, Map> part2Cols, Map tableUsePartLevelAuth) throws HiveException { From 769785a165c9f768d5e134045123e1b90b97498d Mon Sep 17 00:00:00 2001 From: "ankesh.maheshwari" Date: Tue, 19 Mar 2019 17:22:19 +0530 Subject: [PATCH 3/4] 
converting to first level column access --- .../org/apache/hadoop/hive/ql/Driver.java | 28 +++++++++++++++---- 1 file changed, 23 insertions(+), 5 deletions(-) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java index 0b5468734502..c75074a2207a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -28,6 +28,7 @@ import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.LinkedList; @@ -38,6 +39,8 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.ReentrantLock; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Iterables; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.fs.FSDataInputStream; @@ -124,6 +127,7 @@ import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.ByteStream; import org.apache.hadoop.hive.serde2.SerDeUtils; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; @@ -134,6 +138,7 @@ import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hive.common.util.ShutdownHookManager; +import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -734,7 +739,8 @@ private String getExplainOutput(BaseSemanticAnalyzer sem, QueryPlan plan, * @throws AuthorizationException */ public static void doAuthorization(HiveOperation op, BaseSemanticAnalyzer sem, String command) - throws HiveException, AuthorizationException { + throws HiveException, AuthorizationException, IOException + { 
SessionState ss = SessionState.get(); Hive db = sem.getDb(); @@ -915,7 +921,8 @@ public static void doAuthorization(HiveOperation op, BaseSemanticAnalyzer sem, S } } - private static void addStructColumns(BaseSemanticAnalyzer sem, ColumnAccessInfo colAccessInfo) { + private static void addStructColumns(BaseSemanticAnalyzer sem, ColumnAccessInfo colAccessInfo) throws IOException + { TableDesc tableDesc = sem.getFetchTask().getTblDesc(); String columnTypeProperty = tableDesc.getProperties().getProperty(serdeConstants.LIST_COLUMN_TYPES); String columnNameProperty = tableDesc.getProperties().getProperty(serdeConstants.LIST_COLUMNS); @@ -935,9 +942,20 @@ private static void addStructColumns(BaseSemanticAnalyzer sem, ColumnAccessInfo } StructTypeInfo rowTypeInfo = (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes); LOG.info("Following are the struct field Names : " + rowTypeInfo.getAllStructFieldNames()); - for(String column : rowTypeInfo.getAllStructFieldNames()) { - LOG.info("Adding Column : " + column); - colAccessInfo.add(tableDesc.getTableName(), column); + + int cnt = 0; + for (TypeInfo value: rowTypeInfo.getAllStructFieldTypeInfos()) { + if(value.getCategory().equals(ObjectInspector.Category.STRUCT)) { + JSONObject jsnobject = new JSONObject(value); + ObjectMapper objectMapper = new ObjectMapper(); + JsonNode node = objectMapper.readTree(jsnobject.get("allStructFieldNames").toString()); + Iterator file = node.elements(); + while (file.hasNext()) { + JsonNode al = file.next(); + colAccessInfo.add(tableDesc.getTableName(), rowTypeInfo.getAllStructFieldNames().get(cnt) + "." + al.textValue()); + } + } + cnt++; } } From b7fe1d9090531be1bb3e70dd7031da75ce84707d Mon Sep 17 00:00:00 2001 From: "ankesh.maheshwari" Date: Tue, 19 Mar 2019 18:22:12 +0530 Subject: [PATCH 4/4] replace . 
with @ --- ql/src/java/org/apache/hadoop/hive/ql/Driver.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java index c75074a2207a..a63be4bd1740 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -952,7 +952,7 @@ private static void addStructColumns(BaseSemanticAnalyzer sem, ColumnAccessInfo Iterator file = node.elements(); while (file.hasNext()) { JsonNode al = file.next(); - colAccessInfo.add(tableDesc.getTableName(), rowTypeInfo.getAllStructFieldNames().get(cnt) + "." + al.textValue()); + colAccessInfo.add(tableDesc.getTableName().replace('.','@'), rowTypeInfo.getAllStructFieldNames().get(cnt) + "." + al.textValue()); } } cnt++;