Commit 60840a7: refactor

deniskuzZ committed Oct 21, 2024
1 parent f27a60d
Showing 6 changed files with 29 additions and 45 deletions.
File 1 of 6:
@@ -2007,11 +2007,7 @@ public List<Partition> getPartitions(org.apache.hadoop.hive.ql.metadata.Table ta
         .map(partName -> {
           Map<String, String> partSpecMap = Maps.newLinkedHashMap();
           Warehouse.makeSpecFromName(partSpecMap, new Path(partName), null);
-          try {
-            return new DummyPartition(table, partName, partSpecMap);
-          } catch (HiveException e) {
-            throw new RuntimeException("Unable to construct name for dummy partition due to: ", e);
-          }
+          return new DummyPartition(table, partName, partSpecMap);
         }).collect(Collectors.toList());
   }

@@ -2038,7 +2034,7 @@ private Partition getPartitionImpl(org.apache.hadoop.hive.ql.metadata.Table tabl
     try {
       String partName = Warehouse.makePartName(partitionSpec, false);
       return new DummyPartition(table, partName, partitionSpec);
-    } catch (MetaException | HiveException e) {
+    } catch (MetaException e) {
       throw new SemanticException("Unable to construct name for dummy partition due to: ", e);
     }
   }

@@ -2151,10 +2147,12 @@ public List<Partition> getPartitionsByExpr(org.apache.hadoop.hive.ql.metadata.Ta
   }

   @Override
-  public List<Partition> getPartitionsByExpr(org.apache.hadoop.hive.ql.metadata.Table hmsTable,
-      ExprNodeDesc filter, boolean latestSpecOnly) throws SemanticException {
-    SearchArgument sarg = ConvertAstToSearchArg.create(conf, (ExprNodeGenericFuncDesc) filter);
-    Expression exp = HiveIcebergFilterFactory.generateFilterExpression(sarg);
+  public List<Partition> getPartitionsByExpr(org.apache.hadoop.hive.ql.metadata.Table hmsTable, ExprNodeDesc filter,
+      boolean latestSpecOnly) throws SemanticException {
+    Expression exp = HiveIcebergInputFormat.getFilterExpr(conf, (ExprNodeGenericFuncDesc) filter);
+    if (exp == null) {
+      return Lists.newArrayList(new DummyPartition(hmsTable));
+    }
     Table table = IcebergTableUtil.getTable(conf, hmsTable.getTTable());
     int tableSpecId = table.spec().specId();
     Set<Partition> partitions = Sets.newHashSet();

@@ -2170,12 +2168,7 @@ public List<Partition> getPartitionsByExpr(org.apache.hadoop.hive.ql.metadata.Ta
           String partName = spec.partitionToPath(partitionData);
           Map<String, String> partSpecMap = Maps.newLinkedHashMap();
           Warehouse.makeSpecFromName(partSpecMap, new Path(partName), null);
-          DummyPartition partition;
-          try {
-            partition = new DummyPartition(hmsTable, partName, partSpecMap);
-          } catch (HiveException e) {
-            throw new RuntimeException("Unable to construct name for dummy partition due to: ", e);
-          }
+          DummyPartition partition = new DummyPartition(hmsTable, partName, partSpecMap);
           partitions.add(partition);
         }
       });
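For readability, the post-commit body of getPartitionsByExpr reassembled from the two hunks above (lines outside the captured hunks are elided, so this is a sketch, not the complete method):

  @Override
  public List<Partition> getPartitionsByExpr(org.apache.hadoop.hive.ql.metadata.Table hmsTable, ExprNodeDesc filter,
      boolean latestSpecOnly) throws SemanticException {
    // One-step translation of the Hive filter AST into an Iceberg Expression,
    // replacing the old SARG round-trip through ConvertAstToSearchArg and
    // HiveIcebergFilterFactory.
    Expression exp = HiveIcebergInputFormat.getFilterExpr(conf, (ExprNodeGenericFuncDesc) filter);
    if (exp == null) {
      // No pushable filter: degrade to a single table-level dummy partition
      // (the new DummyPartition(Table) convenience constructor) instead of failing.
      return Lists.newArrayList(new DummyPartition(hmsTable));
    }
    Table table = IcebergTableUtil.getTable(conf, hmsTable.getTTable());
    ...
  }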
File 2 of 6:
@@ -172,7 +172,7 @@ private void addTruncateTableOutputs(ASTNode root, Table table, Map<String, Stri
         String partName = Warehouse.makePartName(partitionSpec, false);
         Partition partition = new DummyPartition(table, partName, partitionSpec);
         outputs.add(new WriteEntity(partition, writeType));
-      } catch (MetaException | HiveException e) {
+      } catch (MetaException e) {
         throw new SemanticException("Unable to construct name for dummy partition due to: ", e);
       }
     } else {
File 3 of 6:
@@ -425,15 +425,11 @@ private List<HiveLockObj> getLockObjects(QueryPlan plan, Database db,
         String[] nameValue = partn.split("=");
         assert(nameValue.length == 2);
         partialSpec.put(nameValue[0], nameValue[1]);
-        DummyPartition par;
-        try {
-          par = new DummyPartition(p.getTable(), p.getTable().getDbName()
-              + "/" + FileUtils.escapePathName(p.getTable().getTableName()).toLowerCase()
-              + "/" + partialName,
-              partialSpec);
-        } catch (HiveException e) {
-          throw new LockException("Unable to construct name for dummy partition due to: ", e);
-        }
+        DummyPartition par = new DummyPartition(p.getTable(),
+            p.getTable().getDbName()
+                + "/" + FileUtils.escapePathName(p.getTable().getTableName()).toLowerCase()
+                + "/" + partialName,
+            partialSpec);
         locks.add(new HiveLockObj(new HiveLockObject(par, lockData), mode));
         partialName.append("/");
       }
File 4 of 6 (DummyPartition.java):
@@ -44,17 +44,21 @@ public DummyPartition() {
   public DummyPartition(Table tbl, String name) {
     setTable(tbl);
     this.name = name;
   }

+  public DummyPartition(Table tbl) {
+    this(tbl, null, Maps.newHashMap());
+  }
+
-  public DummyPartition(Table tbl, String name, Map<String, String> partSpec) throws HiveException {
-    org.apache.hadoop.hive.metastore.api.Partition tPart =
+  public DummyPartition(Table tbl, String name, Map<String, String> partSpec) {
+    this(tbl, name);
+    org.apache.hadoop.hive.metastore.api.Partition tPart =
         new org.apache.hadoop.hive.metastore.api.Partition();
     tPart.setSd(tbl.getSd().deepCopy());
     tPart.setParameters(Maps.newHashMap());

-    this.name = name;
     this.partSpec = Maps.newLinkedHashMap(partSpec);
-    initialize(tbl,tPart);
+    setTPartition(tPart);
   }

   public String getName() {
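Reassembled from the hunk above, the constructor chain after this commit reads as follows (other members of DummyPartition elided). The key point is that the three-argument constructor no longer declares HiveException, which is what lets every caller in this commit drop its try/catch:

  public DummyPartition(Table tbl, String name) {
    setTable(tbl);
    this.name = name;
  }

  // New convenience constructor: a dummy partition covering the whole table.
  public DummyPartition(Table tbl) {
    this(tbl, null, Maps.newHashMap());
  }

  public DummyPartition(Table tbl, String name, Map<String, String> partSpec) {
    this(tbl, name);
    org.apache.hadoop.hive.metastore.api.Partition tPart =
        new org.apache.hadoop.hive.metastore.api.Partition();
    tPart.setSd(tbl.getSd().deepCopy());
    tPart.setParameters(Maps.newHashMap());

    this.partSpec = Maps.newLinkedHashMap(partSpec);
    // setTPartition replaces the old initialize(tbl, tPart) call, which could
    // throw HiveException; construction is now non-throwing.
    setTPartition(tPart);
  }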
File 5 of 6:
@@ -510,12 +510,8 @@ public static void setMapWork(MapWork plan, ParseContext parseCtx, Set<ReadEntit
     if (partsList == null) {
       Table tab = tsOp.getConf().getTableMetadata();
       if (tab.alwaysUnpartitioned()) {
-        try {
-          partsList = new PrunedPartitionList(tab, null, Sets.newHashSet(new Partition(tab)),
-              Collections.emptyList(), false);
-        } catch (HiveException e) {
-          throw new SemanticException("Unable to construct name for dummy partition due to: ", e);
-        }
+        partsList = new PrunedPartitionList(tab, null, Sets.newHashSet(new DummyPartition(tab)),
+            Collections.emptyList(), false);
       } else {
         partsList = PartitionPruner.prune(tsOp, parseCtx, alias_id);
         isFullAcidTable = tsOp.getConf().isFullAcidTable();
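This hunk is where the new single-argument constructor earns its keep: new Partition(tab) declares HiveException, while new DummyPartition(tab) does not, so the always-unpartitioned case sheds both the full Partition object and the surrounding try/catch in one move.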
File 6 of 6:
@@ -7752,7 +7752,7 @@ protected Operator genFileSinkPlan(String dest, QB qb, Operator input)
       try {
         String partName = Warehouse.makePartName(partSpec, false);
         dummyPartition = new DummyPartition(destinationTable, partName, partSpec);
-      } catch (MetaException | HiveException e) {
+      } catch (MetaException e) {
         throw new SemanticException("Unable to construct name for dummy partition due to: ", e);
       }
       if (!outputs.add(new WriteEntity(dummyPartition, determineWriteType(ltd, dest)))) {

@@ -8661,14 +8661,9 @@ private WriteEntity generateTableWriteEntity(String dest, Table dest_tab,
     else {
       String ppath = dpCtx.getSPPath();
       ppath = ppath.substring(0, ppath.length() - 1);
-      DummyPartition p;
-      try {
-        p = new DummyPartition(dest_tab, dest_tab.getDbName() +
-            "@" + dest_tab.getTableName() + "@" + ppath,
-            partSpec);
-      } catch (HiveException e) {
-        throw new SemanticException("Unable to construct name for dummy partition due to: ", e);
-      }
+      DummyPartition p = new DummyPartition(dest_tab,
+          dest_tab.getDbName() + "@" + dest_tab.getTableName() + "@" + ppath,
+          partSpec);
       WriteEntity.WriteType writeType;
       if (ltd.isInsertOverwrite()) {
         writeType = WriteEntity.WriteType.INSERT_OVERWRITE;
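The net effect across all six files is the same mechanical simplification; schematically (the identifiers here are generic stand-ins, not from any one call site):

  // Before: the constructor declared HiveException, forcing boilerplate at each call site.
  DummyPartition p;
  try {
    p = new DummyPartition(table, partName, partSpec);
  } catch (HiveException e) {
    throw new SemanticException("Unable to construct name for dummy partition due to: ", e);
  }

  // After: construction is non-throwing, so a single expression suffices, and
  // multi-catch blocks elsewhere narrow from (MetaException | HiveException) to MetaException.
  DummyPartition p = new DummyPartition(table, partName, partSpec);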
