Dummy #6453

@@ -31,7 +31,6 @@
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
-import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
@@ -232,7 +231,7 @@ static VectorizedRowBatchCtx createFakeVrbCtx(MapWork mapWork) throws HiveExcept
if (paths.hasNext()) {
PartitionDesc partDesc = mapWork.getPathToPartitionInfo().get(paths.next());
if (partDesc != null) {
-LinkedHashMap<String, String> partSpec = partDesc.getPartSpec();
+Map<String, String> partSpec = partDesc.getPartSpec();
if (partSpec != null && !partSpec.isEmpty()) {
partitionColumnCount = partSpec.size();
}
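Note on the `LinkedHashMap` → `Map` widening above: a minimal, self-contained sketch (names are illustrative, not Hive's) of why the widened declaration is behavior-preserving. The runtime object returned by `getPartSpec()` can still be a `LinkedHashMap`, so insertion order survives; only the compile-time contract loosens.

```java
import java.util.LinkedHashMap;
import java.util.Map;

public class PartSpecWideningDemo {
    public static void main(String[] args) {
        // The concrete object is still a LinkedHashMap, so insertion order is kept.
        Map<String, String> partSpec = new LinkedHashMap<>();
        partSpec.put("ds", "2024-01-01");
        partSpec.put("hr", "00");

        // Read-only callers never needed the LinkedHashMap type to begin with.
        partSpec.forEach((k, v) -> System.out.println(k + "=" + v)); // prints ds first, then hr
    }
}
```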
32 changes: 16 additions & 16 deletions ql/src/java/org/apache/hadoop/hive/llap/ProactiveEviction.java
@@ -158,13 +158,13 @@ public static final class Request {

// Holds a hierarchical structure of DBs, tables and partitions such as:
// { testdb : { testtab0 : [], testtab1 : [ {pk0 : p0v0, pk1 : p0v1}, {pk0 : p1v0, pk1 : p1v1} ] }, testdb2 : {} }
-private final Map<String, Map<String, Set<LinkedHashMap<String, String>>>> entities;
+private final Map<String, Map<String, Set<Map<String, String>>>> entities;

-private Request(Map<String, Map<String, Set<LinkedHashMap<String, String>>>> entities) {
+private Request(Map<String, Map<String, Set<Map<String, String>>>> entities) {
this.entities = entities;
}

-public Map<String, Map<String, Set<LinkedHashMap<String, String>>>> getEntities() {
+public Map<String, Map<String, Set<Map<String, String>>>> getEntities() {
return entities;
}

@@ -191,21 +191,21 @@ public List<LlapDaemonProtocolProtos.EvictEntityRequestProto> toProtoRequests()

List<LlapDaemonProtocolProtos.EvictEntityRequestProto> protoRequests = new LinkedList<>();

-for (Map.Entry<String, Map<String, Set<LinkedHashMap<String, String>>>> dbEntry : entities.entrySet()) {
+for (Map.Entry<String, Map<String, Set<Map<String, String>>>> dbEntry : entities.entrySet()) {
String dbName = dbEntry.getKey();
-Map<String, Set<LinkedHashMap<String, String>>> tables = dbEntry.getValue();
+Map<String, Set<Map<String, String>>> tables = dbEntry.getValue();

LlapDaemonProtocolProtos.EvictEntityRequestProto.Builder requestBuilder =
LlapDaemonProtocolProtos.EvictEntityRequestProto.newBuilder();
LlapDaemonProtocolProtos.TableProto.Builder tableBuilder = null;

requestBuilder.setDbName(dbName.toLowerCase());
-for (Map.Entry<String, Set<LinkedHashMap<String, String>>> tableEntry : tables.entrySet()) {
+for (Map.Entry<String, Set<Map<String, String>>> tableEntry : tables.entrySet()) {
String tableName = tableEntry.getKey();
tableBuilder = LlapDaemonProtocolProtos.TableProto.newBuilder();
tableBuilder.setTableName(tableName.toLowerCase());

-Set<LinkedHashMap<String, String>> partitions = tableEntry.getValue();
+Set<Map<String, String>> partitions = tableEntry.getValue();
Set<String> partitionKeys = null;

for (Map<String, String> partitionSpec : partitions) {
@@ -245,7 +245,7 @@ public boolean isTagMatch(CacheTag cacheTag) {
return false;
}

-Map<String, Set<LinkedHashMap<String, String>>> tables = entities.get(db);
+Map<String, Set<Map<String, String>>> tables = entities.get(db);

// If true, must be a drop DB event and this cacheTag matches.
if (tables.isEmpty()) {
@@ -261,7 +261,7 @@ public boolean isTagMatch(CacheTag cacheTag) {
for (String tableAndDbName : tables.keySet()) {
if (tableAndDbName.equals(tagTableName.getNotEmptyDbTable())) {

-Set<LinkedHashMap<String, String>> partDescs = tables.get(tableAndDbName);
+Set<Map<String, String>> partDescs = tables.get(tableAndDbName);

// If true, must be a drop table event, and this cacheTag matches.
if (partDescs == null) {
@@ -292,7 +292,7 @@ public String toString() {
*/
public static final class Builder {

-private final Map<String, Map<String, Set<LinkedHashMap<String, String>>>> entities;
+private final Map<String, Map<String, Set<Map<String, String>>>> entities;

private Builder() {
this.entities = new HashMap<>();
@@ -302,7 +302,7 @@ public static Builder create() {
return new Builder();
}

-public Builder addPartitionOfATable(String db, String tableName, LinkedHashMap<String, String> partSpec) {
+public Builder addPartitionOfATable(String db, String tableName, Map<String, String> partSpec) {
ensureDb(db);
ensureTable(db, tableName);
entities.get(db).get(tableName).add(partSpec);
@@ -325,7 +325,7 @@ public Request build() {
}

private void ensureDb(String dbName) {
-Map<String, Set<LinkedHashMap<String, String>>> tables = entities.get(dbName);
+Map<String, Set<Map<String, String>>> tables = entities.get(dbName);
if (tables == null) {
tables = new HashMap<>();
entities.put(dbName, tables);
@@ -334,9 +334,9 @@ private void ensureDb(String dbName) {

private void ensureTable(String dbName, String tableName) {
ensureDb(dbName);
-Map<String, Set<LinkedHashMap<String, String>>> tables = entities.get(dbName);
+Map<String, Set<Map<String, String>>> tables = entities.get(dbName);

-Set<LinkedHashMap<String, String>> partitions = tables.get(tableName);
+Set<Map<String, String>> partitions = tables.get(tableName);
if (partitions == null) {
partitions = new HashSet<>();
tables.put(tableName, partitions);
@@ -352,7 +352,7 @@ public Builder fromProtoRequest(LlapDaemonProtocolProtos.EvictEntityRequestProto
entities.clear();
String dbName = protoRequest.getDbName().toLowerCase();

-Map<String, Set<LinkedHashMap<String, String>>> entitiesInDb = new HashMap<>();
+Map<String, Set<Map<String, String>>> entitiesInDb = new HashMap<>();
List<LlapDaemonProtocolProtos.TableProto> tables = protoRequest.getTableList();

if (tables != null && !tables.isEmpty()) {
@@ -364,7 +364,7 @@ public Builder fromProtoRequest(LlapDaemonProtocolProtos.EvictEntityRequestProto
entitiesInDb.put(dbAndTableName, null);
continue;
}
-Set<LinkedHashMap<String, String>> partitions = new HashSet<>();
+Set<Map<String, String>> partitions = new HashSet<>();
LinkedHashMap<String, String> partDesc = new LinkedHashMap<>();

for (int valIx = 0; valIx < table.getPartValCount(); ++valIx) {
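For readers of `ProactiveEviction.Request`: a hedged usage sketch of the `Builder` API as it appears in this diff (database, table, and partition values are made up, and the chained call assumes `addPartitionOfATable` returns the builder, which its signature suggests but the elided body does not show). The nested `entities` map mirrors the DB → table → partition-spec hierarchy from the class comment; `addPartitionOfATable` now accepts any `Map`, though a `LinkedHashMap` remains the natural choice when key order matters.

```java
import java.util.LinkedHashMap;
import java.util.Map;

// Sketch only; assumes the Hive LLAP classes from this diff are on the classpath.
Map<String, String> partSpec = new LinkedHashMap<>();
partSpec.put("pk0", "p0v0");
partSpec.put("pk1", "p0v1");

ProactiveEviction.Request request = ProactiveEviction.Request.Builder.create()
    .addPartitionOfATable("testdb", "testtab1", partSpec)
    .build();
```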
@@ -66,10 +66,7 @@ private List<FieldSchema> getColumnsByPattern() throws HiveException {

private List<FieldSchema> getCols() throws HiveException {
Table table = context.getDb().getTable(desc.getTableName());
-List<FieldSchema> allColumns = new ArrayList<>();
-allColumns.addAll(table.getCols());
-allColumns.addAll(table.getPartCols());
-return allColumns;
+return new ArrayList<>(table.getAllCols());
}

private Matcher getMatcher() {
@@ -94,13 +91,7 @@ private List<FieldSchema> filterColumns(List<FieldSchema> columns, Matcher match
}

if (desc.isSorted()) {
-result.sort(
-new Comparator<FieldSchema>() {
-@Override
-public int compare(FieldSchema f1, FieldSchema f2) {
-return f1.getName().compareTo(f2.getName());
-}
-});
+result.sort(Comparator.comparing(FieldSchema::getName));
}
return result;
}
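The `filterColumns` change above swaps an anonymous `Comparator` for `Comparator.comparing` with a key extractor. A self-contained equivalence check (the `Field` record is an illustrative stand-in for `FieldSchema`, not the Hive class):

```java
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class ComparingDemo {
    // Illustrative stand-in for FieldSchema.
    record Field(String name) {
        String getName() { return name; }
    }

    public static void main(String[] args) {
        List<Field> result = new ArrayList<>(List.of(new Field("b"), new Field("a")));

        // Old style: anonymous Comparator class.
        result.sort(new Comparator<Field>() {
            @Override
            public int compare(Field f1, Field f2) {
                return f1.getName().compareTo(f2.getName());
            }
        });

        // New style: equivalent ordering, one line.
        result.sort(Comparator.comparing(Field::getName));
        System.out.println(result); // [Field[name=a], Field[name=b]]
    }
}
```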
@@ -59,7 +59,6 @@

import java.io.DataOutputStream;
import java.io.IOException;
-import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
@@ -70,7 +69,6 @@
import java.util.Set;
import java.util.TreeMap;
import java.util.Map.Entry;
-import java.util.stream.Collectors;

import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.TABLE_IS_CTAS;
import static org.apache.hadoop.hive.ql.ddl.ShowUtils.ALIGNMENT;
@@ -171,13 +169,7 @@ private void addPartitionData(DataOutputStream out, HiveConf conf, String column
boolean isFormatted, boolean isOutputPadded) throws IOException {
String partitionData = "";
if (columnPath == null) {
-List<FieldSchema> partitionColumns = null;
-// TODO (HIVE-29413): Refactor to a generic getPartCols() implementation
-if (table.isPartitioned()) {
-partitionColumns = table.hasNonNativePartitionSupport() ?
-table.getStorageHandler().getPartitionKeys(table) :
-table.getPartCols();
-}
+List<FieldSchema> partitionColumns = table.isPartitioned() ? table.getPartCols() : null;
if (CollectionUtils.isNotEmpty(partitionColumns) &&
conf.getBoolVar(ConfVars.HIVE_DISPLAY_PARTITION_COLUMNS_SEPARATELY)) {
TextMetaDataTable metaDataTable = new TextMetaDataTable();
@@ -204,13 +196,9 @@ private void addPartitionData(DataOutputStream out, HiveConf conf, String column
}

private void addFormattedTableData(DataOutputStream out, Table table, Partition partition, boolean isOutputPadded)
-throws IOException, UnsupportedEncodingException {
-String formattedTableInfo = null;
-if (partition != null) {
-formattedTableInfo = getPartitionInformation(table, partition);
-} else {
-formattedTableInfo = getTableInformation(table, isOutputPadded);
-}
+throws IOException {
+String formattedTableInfo = (partition != null) ? getPartitionInformation(table, partition) :
+getTableInformation(table, isOutputPadded);

if (table.getTableConstraintsInfo().isTableConstraintsInfoNotEmpty()) {
formattedTableInfo += getConstraintsInformation(table);
@@ -335,24 +323,24 @@ private void getStorageDescriptorInfo(StringBuilder tableInfo, Table table, Stor
List<String> skewedCoumnNames =
storageDesc.getSkewedInfo().getSkewedColNames().stream()
.sorted()
-.collect(Collectors.toList());
+.toList();
formatOutput("Skewed Columns:", skewedCoumnNames.toString(), tableInfo);
}

if (CollectionUtils.isNotEmpty(storageDesc.getSkewedInfo().getSkewedColValues())) {
List<List<String>> skewedColumnValues =
storageDesc.getSkewedInfo().getSkewedColValues().stream()
.sorted(new VectorComparator<String>())
-.collect(Collectors.toList());
+.toList();
formatOutput("Skewed Values:", skewedColumnValues.toString(), tableInfo);
}

-Map<List<String>, String> skewedColMap = new TreeMap<>(new VectorComparator<String>());
+Map<List<String>, String> skewedColMap = new TreeMap<>(new VectorComparator<>());
skewedColMap.putAll(storageDesc.getSkewedInfo().getSkewedColValueLocationMaps());
if (MapUtils.isNotEmpty(skewedColMap)) {
formatOutput("Skewed Value to Path:", skewedColMap.toString(), tableInfo);
Map<List<String>, String> truncatedSkewedColMap =
-new TreeMap<List<String>, String>(new VectorComparator<String>());
+new TreeMap<>(new VectorComparator<>());
// walk through existing map to truncate path so that test won't mask it then we can verify location is right
Set<Entry<List<String>, String>> entries = skewedColMap.entrySet();
for (Entry<List<String>, String> entry : entries) {
@@ -401,7 +389,7 @@ private void getPartitionMetaDataInformation(StringBuilder tableInfo, Partition
}
}

-private class VectorComparator<T extends Comparable<T>> implements Comparator<List<T>>{
+private static final class VectorComparator<T extends Comparable<T>> implements Comparator<List<T>> {
@Override
public int compare(List<T> listA, List<T> listB) {
for (int i = 0; i < listA.size() && i < listB.size(); i++) {
@@ -436,7 +424,7 @@ private void displayAllParameters(Map<String, String> params, StringBuilder tabl

private void displayAllParameters(Map<String, String> params, StringBuilder tableInfo, boolean escapeUnicode,
boolean isOutputPadded) {
-List<String> keys = new ArrayList<String>(params.keySet());
+List<String> keys = new ArrayList<>(params.keySet());
Collections.sort(keys);
for (String key : keys) {
String value = params.get(key);
@@ -624,7 +612,7 @@ private void addExtendedTableData(DataOutputStream out, Table table, Partition p
}

private void addExtendedConstraintData(DataOutputStream out, Table table)
-throws IOException, UnsupportedEncodingException {
+throws IOException {
if (table.getTableConstraintsInfo().isTableConstraintsInfoNotEmpty()) {
out.write(("Constraints").getBytes(StandardCharsets.UTF_8));
out.write(Utilities.tabCode);
@@ -656,7 +644,7 @@ private void addExtendedConstraintData(DataOutputStream out, Table table)
}

private void addExtendedStorageData(DataOutputStream out, Table table)
-throws IOException, UnsupportedEncodingException {
+throws IOException {
if (table.getStorageHandlerInfo() != null) {
out.write(("StorageHandlerInfo").getBytes(StandardCharsets.UTF_8));
out.write(Utilities.newLineCode);
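Two of the changes above deserve a note. `Stream.toList()` (Java 16+) returns an unmodifiable list, unlike `Collectors.toList()`; that is safe here because the results are only rendered via `toString()`. And marking `VectorComparator` `static final` drops the implicit reference to the enclosing formatter instance. A sketch of the comparator's lexicographic contract (the tie-break for a shared prefix is an assumption; the original tail is not shown in this diff):

```java
import java.util.Comparator;
import java.util.List;

// Lexicographic, element-by-element comparison of two lists.
final class ListComparator<T extends Comparable<T>> implements Comparator<List<T>> {
    @Override
    public int compare(List<T> listA, List<T> listB) {
        for (int i = 0; i < listA.size() && i < listB.size(); i++) {
            int c = listA.get(i).compareTo(listB.get(i));
            if (c != 0) {
                return c; // first differing element decides
            }
        }
        // Assumed tie-break: the shorter list sorts first.
        return Integer.compare(listA.size(), listB.size());
    }
}
```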
@@ -23,7 +23,6 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
-import java.util.Map.Entry;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
3 changes: 1 addition & 2 deletions ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java
@@ -24,7 +24,6 @@
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
-import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

@@ -254,7 +253,7 @@ public static String getPartialName(Partition p, int level) throws HiveException
* @throws HiveException
*/
public static String conflictingArchiveNameOrNull(Hive db, Table tbl,
-LinkedHashMap<String, String> partSpec)
+Map<String, String> partSpec)
throws HiveException {

List<FieldSchema> partKeys = tbl.getPartitionKeys();
@@ -38,7 +38,6 @@
import org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.LongColumnStatsData;
-import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.SkewedInfo;
@@ -19,7 +19,6 @@

import java.io.IOException;
import java.util.Arrays;
-import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.IntStream;

@@ -287,7 +286,7 @@ public static void getPartitionValues(VectorizedRowBatchCtx vrbCtx,
public static void getPartitionValues(VectorizedRowBatchCtx vrbCtx, PartitionDesc partDesc,
Object[] partitionValues) {

-LinkedHashMap<String, String> partSpec = partDesc.getPartSpec();
+Map<String, String> partSpec = partDesc.getPartSpec();

for (int i = 0; i < vrbCtx.partitionColumnCount; i++) {
Object objectValue;
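The `getPartitionValues` change relies on the same invariant as the earlier widenings: the spec's iteration order must line up with the table's partition-column order, which the backing `LinkedHashMap` still guarantees after the declared type is widened. A minimal sketch of that pattern (hypothetical helper, not the Hive implementation):

```java
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;

public class PartitionValuesDemo {
    // Hypothetical stand-in for the positional fill in getPartitionValues.
    static void fillPartitionValues(Map<String, String> partSpec, Object[] partitionValues) {
        int i = 0;
        // Iteration follows the backing LinkedHashMap's insertion order,
        // i.e. the order of the table's partition keys.
        for (Map.Entry<String, String> e : partSpec.entrySet()) {
            partitionValues[i++] = e.getValue();
        }
    }

    public static void main(String[] args) {
        Map<String, String> partSpec = new LinkedHashMap<>();
        partSpec.put("ds", "2024-01-01");
        partSpec.put("hr", "00");

        Object[] values = new Object[partSpec.size()];
        fillPartitionValues(partSpec, values);
        System.out.println(Arrays.toString(values)); // [2024-01-01, 00]
    }
}
```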
@@ -90,10 +90,7 @@ public List<String> getValues() {
Table table = this.getTable();
values = new ArrayList<>();

-// TODO (HIVE-29413): Refactor to a generic getPartCols() implementation
-for (FieldSchema fs : table.hasNonNativePartitionSupport()
-? table.getStorageHandler().getPartitionKeys(table)
-: table.getPartCols()) {
+for (FieldSchema fs : table.getPartCols()) {
String val = partSpec.get(fs.getName());
values.add(val);
}
@@ -20,7 +20,6 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
-import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -430,10 +429,10 @@ private static RelNode createMaterializedViewScan(HiveConf conf, Table viewTable

// 1.2 Add column info corresponding to partition columns
ArrayList<ColumnInfo> partitionColumns = new ArrayList<ColumnInfo>();
-for (FieldSchema part_col : viewTable.getPartCols()) {
-colName = part_col.getName();
+for (FieldSchema partCol : viewTable.getPartCols()) {
+colName = partCol.getName();
colInfo = new ColumnInfo(colName,
-TypeInfoFactory.getPrimitiveTypeInfo(part_col.getType()), null, true);
+TypeInfoFactory.getPrimitiveTypeInfo(partCol.getType()), null, true);
rr.put(null, colName, colInfo);
cInfoLst.add(colInfo);
partitionColumns.add(colInfo);