Skip to content

Commit bff3ae7

Browse files
committed
refactor-2
1 parent 40548fa commit bff3ae7

File tree

16 files changed

+109
-142
lines changed

16 files changed

+109
-142
lines changed

llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -232,7 +232,7 @@ static VectorizedRowBatchCtx createFakeVrbCtx(MapWork mapWork) throws HiveExcept
232232
if (paths.hasNext()) {
233233
PartitionDesc partDesc = mapWork.getPathToPartitionInfo().get(paths.next());
234234
if (partDesc != null) {
235-
LinkedHashMap<String, String> partSpec = partDesc.getPartSpec();
235+
Map<String, String> partSpec = partDesc.getPartSpec();
236236
if (partSpec != null && !partSpec.isEmpty()) {
237237
partitionColumnCount = partSpec.size();
238238
}

ql/src/java/org/apache/hadoop/hive/llap/ProactiveEviction.java

Lines changed: 16 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -158,13 +158,13 @@ public static final class Request {
158158

159159
// Holds a hierarchical structure of DBs, tables and partitions such as:
160160
// { testdb : { testtab0 : [], testtab1 : [ {pk0 : p0v0, pk1 : p0v1}, {pk0 : p1v0, pk1 : p1v1} ] }, testdb2 : {} }
161-
private final Map<String, Map<String, Set<LinkedHashMap<String, String>>>> entities;
161+
private final Map<String, Map<String, Set<Map<String, String>>>> entities;
162162

163-
private Request(Map<String, Map<String, Set<LinkedHashMap<String, String>>>> entities) {
163+
private Request(Map<String, Map<String, Set<Map<String, String>>>> entities) {
164164
this.entities = entities;
165165
}
166166

167-
public Map<String, Map<String, Set<LinkedHashMap<String, String>>>> getEntities() {
167+
public Map<String, Map<String, Set<Map<String, String>>>> getEntities() {
168168
return entities;
169169
}
170170

@@ -191,21 +191,21 @@ public List<LlapDaemonProtocolProtos.EvictEntityRequestProto> toProtoRequests()
191191

192192
List<LlapDaemonProtocolProtos.EvictEntityRequestProto> protoRequests = new LinkedList<>();
193193

194-
for (Map.Entry<String, Map<String, Set<LinkedHashMap<String, String>>>> dbEntry : entities.entrySet()) {
194+
for (Map.Entry<String, Map<String, Set<Map<String, String>>>> dbEntry : entities.entrySet()) {
195195
String dbName = dbEntry.getKey();
196-
Map<String, Set<LinkedHashMap<String, String>>> tables = dbEntry.getValue();
196+
Map<String, Set<Map<String, String>>> tables = dbEntry.getValue();
197197

198198
LlapDaemonProtocolProtos.EvictEntityRequestProto.Builder requestBuilder =
199199
LlapDaemonProtocolProtos.EvictEntityRequestProto.newBuilder();
200200
LlapDaemonProtocolProtos.TableProto.Builder tableBuilder = null;
201201

202202
requestBuilder.setDbName(dbName.toLowerCase());
203-
for (Map.Entry<String, Set<LinkedHashMap<String, String>>> tableEntry : tables.entrySet()) {
203+
for (Map.Entry<String, Set<Map<String, String>>> tableEntry : tables.entrySet()) {
204204
String tableName = tableEntry.getKey();
205205
tableBuilder = LlapDaemonProtocolProtos.TableProto.newBuilder();
206206
tableBuilder.setTableName(tableName.toLowerCase());
207207

208-
Set<LinkedHashMap<String, String>> partitions = tableEntry.getValue();
208+
Set<Map<String, String>> partitions = tableEntry.getValue();
209209
Set<String> partitionKeys = null;
210210

211211
for (Map<String, String> partitionSpec : partitions) {
@@ -245,7 +245,7 @@ public boolean isTagMatch(CacheTag cacheTag) {
245245
return false;
246246
}
247247

248-
Map<String, Set<LinkedHashMap<String, String>>> tables = entities.get(db);
248+
Map<String, Set<Map<String, String>>> tables = entities.get(db);
249249

250250
// If true, must be a drop DB event and this cacheTag matches.
251251
if (tables.isEmpty()) {
@@ -261,7 +261,7 @@ public boolean isTagMatch(CacheTag cacheTag) {
261261
for (String tableAndDbName : tables.keySet()) {
262262
if (tableAndDbName.equals(tagTableName.getNotEmptyDbTable())) {
263263

264-
Set<LinkedHashMap<String, String>> partDescs = tables.get(tableAndDbName);
264+
Set<Map<String, String>> partDescs = tables.get(tableAndDbName);
265265

266266
// If true, must be a drop table event, and this cacheTag matches.
267267
if (partDescs == null) {
@@ -292,7 +292,7 @@ public String toString() {
292292
*/
293293
public static final class Builder {
294294

295-
private final Map<String, Map<String, Set<LinkedHashMap<String, String>>>> entities;
295+
private final Map<String, Map<String, Set<Map<String, String>>>> entities;
296296

297297
private Builder() {
298298
this.entities = new HashMap<>();
@@ -302,7 +302,7 @@ public static Builder create() {
302302
return new Builder();
303303
}
304304

305-
public Builder addPartitionOfATable(String db, String tableName, LinkedHashMap<String, String> partSpec) {
305+
public Builder addPartitionOfATable(String db, String tableName, Map<String, String> partSpec) {
306306
ensureDb(db);
307307
ensureTable(db, tableName);
308308
entities.get(db).get(tableName).add(partSpec);
@@ -325,7 +325,7 @@ public Request build() {
325325
}
326326

327327
private void ensureDb(String dbName) {
328-
Map<String, Set<LinkedHashMap<String, String>>> tables = entities.get(dbName);
328+
Map<String, Set<Map<String, String>>> tables = entities.get(dbName);
329329
if (tables == null) {
330330
tables = new HashMap<>();
331331
entities.put(dbName, tables);
@@ -334,9 +334,9 @@ private void ensureDb(String dbName) {
334334

335335
private void ensureTable(String dbName, String tableName) {
336336
ensureDb(dbName);
337-
Map<String, Set<LinkedHashMap<String, String>>> tables = entities.get(dbName);
337+
Map<String, Set<Map<String, String>>> tables = entities.get(dbName);
338338

339-
Set<LinkedHashMap<String, String>> partitions = tables.get(tableName);
339+
Set<Map<String, String>> partitions = tables.get(tableName);
340340
if (partitions == null) {
341341
partitions = new HashSet<>();
342342
tables.put(tableName, partitions);
@@ -352,7 +352,7 @@ public Builder fromProtoRequest(LlapDaemonProtocolProtos.EvictEntityRequestProto
352352
entities.clear();
353353
String dbName = protoRequest.getDbName().toLowerCase();
354354

355-
Map<String, Set<LinkedHashMap<String, String>>> entitiesInDb = new HashMap<>();
355+
Map<String, Set<Map<String, String>>> entitiesInDb = new HashMap<>();
356356
List<LlapDaemonProtocolProtos.TableProto> tables = protoRequest.getTableList();
357357

358358
if (tables != null && !tables.isEmpty()) {
@@ -364,7 +364,7 @@ public Builder fromProtoRequest(LlapDaemonProtocolProtos.EvictEntityRequestProto
364364
entitiesInDb.put(dbAndTableName, null);
365365
continue;
366366
}
367-
Set<LinkedHashMap<String, String>> partitions = new HashSet<>();
367+
Set<Map<String, String>> partitions = new HashSet<>();
368368
LinkedHashMap<String, String> partDesc = new LinkedHashMap<>();
369369

370370
for (int valIx = 0; valIx < table.getPartValCount(); ++valIx) {

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/column/show/ShowColumnsOperation.java

Lines changed: 1 addition & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -91,13 +91,7 @@ private List<FieldSchema> filterColumns(List<FieldSchema> columns, Matcher match
9191
}
9292

9393
if (desc.isSorted()) {
94-
result.sort(
95-
new Comparator<FieldSchema>() {
96-
@Override
97-
public int compare(FieldSchema f1, FieldSchema f2) {
98-
return f1.getName().compareTo(f2.getName());
99-
}
100-
});
94+
result.sort(Comparator.comparing(FieldSchema::getName));
10195
}
10296
return result;
10397
}

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/desc/formatter/TextDescTableFormatter.java

Lines changed: 12 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -59,7 +59,6 @@
5959

6060
import java.io.DataOutputStream;
6161
import java.io.IOException;
62-
import java.io.UnsupportedEncodingException;
6362
import java.nio.charset.StandardCharsets;
6463
import java.util.ArrayList;
6564
import java.util.Collections;
@@ -70,7 +69,6 @@
7069
import java.util.Set;
7170
import java.util.TreeMap;
7271
import java.util.Map.Entry;
73-
import java.util.stream.Collectors;
7472

7573
import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.TABLE_IS_CTAS;
7674
import static org.apache.hadoop.hive.ql.ddl.ShowUtils.ALIGNMENT;
@@ -171,11 +169,7 @@ private void addPartitionData(DataOutputStream out, HiveConf conf, String column
171169
boolean isFormatted, boolean isOutputPadded) throws IOException {
172170
String partitionData = "";
173171
if (columnPath == null) {
174-
List<FieldSchema> partitionColumns = null;
175-
// TODO (HIVE-29413): Refactor to a generic getPartCols() implementation
176-
if (table.isPartitioned()) {
177-
partitionColumns = table.getEffectivePartCols();
178-
}
172+
List<FieldSchema> partitionColumns = table.isPartitioned() ? table.getEffectivePartCols() : null;
179173
if (CollectionUtils.isNotEmpty(partitionColumns) &&
180174
conf.getBoolVar(ConfVars.HIVE_DISPLAY_PARTITION_COLUMNS_SEPARATELY)) {
181175
TextMetaDataTable metaDataTable = new TextMetaDataTable();
@@ -202,13 +196,9 @@ private void addPartitionData(DataOutputStream out, HiveConf conf, String column
202196
}
203197

204198
private void addFormattedTableData(DataOutputStream out, Table table, Partition partition, boolean isOutputPadded)
205-
throws IOException, UnsupportedEncodingException {
206-
String formattedTableInfo = null;
207-
if (partition != null) {
208-
formattedTableInfo = getPartitionInformation(table, partition);
209-
} else {
210-
formattedTableInfo = getTableInformation(table, isOutputPadded);
211-
}
199+
throws IOException {
200+
String formattedTableInfo = (partition != null) ? getPartitionInformation(table, partition) :
201+
getTableInformation(table, isOutputPadded);
212202

213203
if (table.getTableConstraintsInfo().isTableConstraintsInfoNotEmpty()) {
214204
formattedTableInfo += getConstraintsInformation(table);
@@ -333,24 +323,24 @@ private void getStorageDescriptorInfo(StringBuilder tableInfo, Table table, Stor
333323
List<String> skewedCoumnNames =
334324
storageDesc.getSkewedInfo().getSkewedColNames().stream()
335325
.sorted()
336-
.collect(Collectors.toList());
326+
.toList();
337327
formatOutput("Skewed Columns:", skewedCoumnNames.toString(), tableInfo);
338328
}
339329

340330
if (CollectionUtils.isNotEmpty(storageDesc.getSkewedInfo().getSkewedColValues())) {
341331
List<List<String>> skewedColumnValues =
342332
storageDesc.getSkewedInfo().getSkewedColValues().stream()
343333
.sorted(new VectorComparator<String>())
344-
.collect(Collectors.toList());
334+
.toList();
345335
formatOutput("Skewed Values:", skewedColumnValues.toString(), tableInfo);
346336
}
347337

348-
Map<List<String>, String> skewedColMap = new TreeMap<>(new VectorComparator<String>());
338+
Map<List<String>, String> skewedColMap = new TreeMap<>(new VectorComparator<>());
349339
skewedColMap.putAll(storageDesc.getSkewedInfo().getSkewedColValueLocationMaps());
350340
if (MapUtils.isNotEmpty(skewedColMap)) {
351341
formatOutput("Skewed Value to Path:", skewedColMap.toString(), tableInfo);
352342
Map<List<String>, String> truncatedSkewedColMap =
353-
new TreeMap<List<String>, String>(new VectorComparator<String>());
343+
new TreeMap<>(new VectorComparator<>());
354344
// walk through existing map to truncate path so that test won't mask it then we can verify location is right
355345
Set<Entry<List<String>, String>> entries = skewedColMap.entrySet();
356346
for (Entry<List<String>, String> entry : entries) {
@@ -399,7 +389,7 @@ private void getPartitionMetaDataInformation(StringBuilder tableInfo, Partition
399389
}
400390
}
401391

402-
private class VectorComparator<T extends Comparable<T>> implements Comparator<List<T>>{
392+
private static class VectorComparator<T extends Comparable<T>> implements Comparator<List<T>> {
403393
@Override
404394
public int compare(List<T> listA, List<T> listB) {
405395
for (int i = 0; i < listA.size() && i < listB.size(); i++) {
@@ -434,7 +424,7 @@ private void displayAllParameters(Map<String, String> params, StringBuilder tabl
434424

435425
private void displayAllParameters(Map<String, String> params, StringBuilder tableInfo, boolean escapeUnicode,
436426
boolean isOutputPadded) {
437-
List<String> keys = new ArrayList<String>(params.keySet());
427+
List<String> keys = new ArrayList<>(params.keySet());
438428
Collections.sort(keys);
439429
for (String key : keys) {
440430
String value = params.get(key);
@@ -622,7 +612,7 @@ private void addExtendedTableData(DataOutputStream out, Table table, Partition p
622612
}
623613

624614
private void addExtendedConstraintData(DataOutputStream out, Table table)
625-
throws IOException, UnsupportedEncodingException {
615+
throws IOException {
626616
if (table.getTableConstraintsInfo().isTableConstraintsInfoNotEmpty()) {
627617
out.write(("Constraints").getBytes(StandardCharsets.UTF_8));
628618
out.write(Utilities.tabCode);
@@ -654,7 +644,7 @@ private void addExtendedConstraintData(DataOutputStream out, Table table)
654644
}
655645

656646
private void addExtendedStorageData(DataOutputStream out, Table table)
657-
throws IOException, UnsupportedEncodingException {
647+
throws IOException {
658648
if (table.getStorageHandlerInfo() != null) {
659649
out.write(("StorageHandlerInfo").getBytes(StandardCharsets.UTF_8));
660650
out.write(Utilities.newLineCode);

ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -254,7 +254,7 @@ public static String getPartialName(Partition p, int level) throws HiveException
254254
* @throws HiveException
255255
*/
256256
public static String conflictingArchiveNameOrNull(Hive db, Table tbl,
257-
LinkedHashMap<String, String> partSpec)
257+
Map<String, String> partSpec)
258258
throws HiveException {
259259

260260
List<FieldSchema> partKeys = tbl.getPartitionKeys();

ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -287,7 +287,7 @@ public static void getPartitionValues(VectorizedRowBatchCtx vrbCtx,
287287
public static void getPartitionValues(VectorizedRowBatchCtx vrbCtx, PartitionDesc partDesc,
288288
Object[] partitionValues) {
289289

290-
LinkedHashMap<String, String> partSpec = partDesc.getPartSpec();
290+
Map<String, String> partSpec = partDesc.getPartSpec();
291291

292292
for (int i = 0; i < vrbCtx.partitionColumnCount; i++) {
293293
Object objectValue;

ql/src/java/org/apache/hadoop/hive/ql/metadata/DummyPartition.java

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -90,7 +90,6 @@ public List<String> getValues() {
9090
Table table = this.getTable();
9191
values = new ArrayList<>();
9292

93-
// TODO (HIVE-29413): Refactor to a generic getPartCols() implementation
9493
for (FieldSchema fs : table.getEffectivePartCols()) {
9594
String val = partSpec.get(fs.getName());
9695
values.add(val);

ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java

Lines changed: 9 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -124,7 +124,7 @@ public Partition(Table tbl, Map<String, String> partSpec, Path location) throws
124124

125125
public static org.apache.hadoop.hive.metastore.api.Partition createMetaPartitionObject(
126126
Table tbl, Map<String, String> partSpec, Path location) throws HiveException {
127-
List<String> pvals = new ArrayList<String>();
127+
List<String> pvals = new ArrayList<>();
128128
for (FieldSchema field : tbl.getEffectivePartCols()) {
129129
String val = partSpec.get(field.getName());
130130
if (val == null || val.isEmpty()) {
@@ -417,7 +417,7 @@ public Path[] getPath(Sample s) throws HiveException {
417417
}
418418

419419
int scount = s.getSampleFraction();
420-
ArrayList<Path> ret = new ArrayList<Path>();
420+
List<Path> ret = new ArrayList<>();
421421

422422
if (bcount == scount) {
423423
ret.add(getBucketPath(s.getSampleNum() - 1));
@@ -429,7 +429,7 @@ public Path[] getPath(Sample s) throws HiveException {
429429
}
430430
// undersampling a bucket
431431
ret.add(getBucketPath((s.getSampleNum() - 1) % bcount));
432-
} else if (bcount > scount) {
432+
} else {
433433
if ((bcount / scount) * scount != bcount) {
434434
throw new HiveException("Sample Count" + scount
435435
+ " is not a divisor of bucket count " + bcount + " for table "
@@ -440,11 +440,11 @@ public Path[] getPath(Sample s) throws HiveException {
440440
ret.add(getBucketPath(i * scount + (s.getSampleNum() - 1)));
441441
}
442442
}
443-
return (ret.toArray(new Path[ret.size()]));
443+
return (ret.toArray(new Path[0]));
444444
}
445445
}
446446

447-
public LinkedHashMap<String, String> getSpec() {
447+
public Map<String, String> getSpec() {
448448
return table.createSpec(tPartition);
449449
}
450450

@@ -543,7 +543,7 @@ public void setLocation(String location) {
543543
*/
544544
public void setValues(Map<String, String> partSpec)
545545
throws HiveException {
546-
List<String> pvals = new ArrayList<String>();
546+
List<String> pvals = new ArrayList<>();
547547
for (FieldSchema field : table.getEffectivePartCols()) {
548548
String val = partSpec.get(field.getName());
549549
if (val == null) {
@@ -583,12 +583,11 @@ public List<String> getSkewedColNames() {
583583
return tPartition.getSd().getSkewedInfo().getSkewedColNames();
584584
}
585585

586-
public void setSkewedValueLocationMap(List<String> valList, String dirName)
587-
throws HiveException {
586+
public void setSkewedValueLocationMap(List<String> valList, String dirName) {
588587
Map<List<String>, String> mappings = tPartition.getSd().getSkewedInfo()
589588
.getSkewedColValueLocationMaps();
590589
if (null == mappings) {
591-
mappings = new HashMap<List<String>, String>();
590+
mappings = new HashMap<>();
592591
tPartition.getSd().getSkewedInfo().setSkewedColValueLocationMaps(mappings);
593592
}
594593

@@ -613,8 +612,7 @@ public int hashCode() {
613612

614613
@Override
615614
public boolean equals(Object obj) {
616-
if (obj instanceof Partition) {
617-
Partition o = (Partition) obj;
615+
if (obj instanceof Partition o) {
618616
return Objects.equals(tPartition, o.tPartition);
619617
}
620618
return false;

0 commit comments

Comments
 (0)