-
Notifications
You must be signed in to change notification settings - Fork 4.8k
HIVE-29413: Avoid code duplication by updating getPartCols method for iceberg tables #6413
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: master
Are you sure you want to change the base?
Changes from all commits
40b675f
dcc6bce
fd02020
2d70019
0f1ca01
deeac50
63960b0
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -66,10 +66,7 @@ private List<FieldSchema> getColumnsByPattern() throws HiveException { | |
|
|
||
| private List<FieldSchema> getCols() throws HiveException { | ||
| Table table = context.getDb().getTable(desc.getTableName()); | ||
| List<FieldSchema> allColumns = new ArrayList<>(); | ||
| allColumns.addAll(table.getCols()); | ||
| allColumns.addAll(table.getPartCols()); | ||
| return allColumns; | ||
| return new ArrayList<>(table.getAllCols()); | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. table.getAllCols() already creates a new ArrayList — wrapping it in one more is redundant |
||
| } | ||
|
|
||
| private Matcher getMatcher() { | ||
|
|
@@ -94,13 +91,7 @@ private List<FieldSchema> filterColumns(List<FieldSchema> columns, Matcher match | |
| } | ||
|
|
||
| if (desc.isSorted()) { | ||
| result.sort( | ||
| new Comparator<FieldSchema>() { | ||
| @Override | ||
| public int compare(FieldSchema f1, FieldSchema f2) { | ||
| return f1.getName().compareTo(f2.getName()); | ||
| } | ||
| }); | ||
| result.sort(Comparator.comparing(FieldSchema::getName)); | ||
| } | ||
| return result; | ||
| } | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -100,7 +100,7 @@ private Table createViewLikeTable(Table oldTable) throws HiveException { | |
| setUserSpecifiedLocation(table); | ||
|
|
||
| table.setFields(oldTable.getCols()); | ||
| table.setPartCols(oldTable.getPartCols()); | ||
| table.setPartCols(oldTable.getEffectivePartCols()); | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. we should keep this unchanged. that would probably try to persist into HMS |
||
|
|
||
| if (desc.getDefaultSerdeProps() != null) { | ||
| for (Map.Entry<String, String> e : desc.getDefaultSerdeProps().entrySet()) { | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -84,7 +84,7 @@ protected void analyzeCommand(TableName tableName, Map<String, String> partition | |
| if (AcidUtils.isTransactionalTable(sourceTable) || AcidUtils.isTransactionalTable(destTable)) { | ||
| throw new SemanticException(ErrorMsg.EXCHANGE_PARTITION_NOT_ALLOWED_WITH_TRANSACTIONAL_TABLES.getMsg()); | ||
| } | ||
| List<String> sourceProjectFilters = MetaStoreUtils.getPvals(sourceTable.getPartCols(), partitionSpecs); | ||
| List<String> sourceProjectFilters = MetaStoreUtils.getPvals(sourceTable.getEffectivePartCols(), partitionSpecs); | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. are you sure we need to change here? i don't think we support |
||
|
|
||
| // check if source partition exists | ||
| GetPartitionsFilterSpec sourcePartitionsFilterSpec = new GetPartitionsFilterSpec(); | ||
|
|
@@ -106,7 +106,7 @@ protected void analyzeCommand(TableName tableName, Map<String, String> partition | |
| throw new SemanticException(ErrorMsg.PARTITION_VALUE_NOT_CONTINUOUS.getMsg(partitionSpecs.toString())); | ||
| } | ||
|
|
||
| List<String> destProjectFilters = MetaStoreUtils.getPvals(destTable.getPartCols(), partitionSpecs); | ||
| List<String> destProjectFilters = MetaStoreUtils.getPvals(destTable.getEffectivePartCols(), partitionSpecs); | ||
|
|
||
| // check if dest partition exists | ||
| GetPartitionsFilterSpec getDestPartitionsFilterSpec = new GetPartitionsFilterSpec(); | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -102,7 +102,7 @@ ExprNodeDesc getShowPartitionsFilter(Table table, ASTNode command) throws Semant | |
| if (astChild.getType() == HiveParser.TOK_WHERE) { | ||
| RowResolver rwsch = new RowResolver(); | ||
| Map<String, String> colTypes = new HashMap<String, String>(); | ||
| for (FieldSchema fs : table.getPartCols()) { | ||
| for (FieldSchema fs : table.getEffectivePartCols()) { | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. could you please check if this is supported in iceberg and add test if yes |
||
| rwsch.put(table.getTableName(), fs.getName(), new ColumnInfo(fs.getName(), | ||
| TypeInfoFactory.stringTypeInfo, null, true)); | ||
| colTypes.put(fs.getName().toLowerCase(), fs.getType()); | ||
|
|
@@ -202,8 +202,8 @@ private String getShowPartitionsOrder(Table table, ASTNode command) throws Seman | |
| if (astChild.getType() == HiveParser.TOK_ORDERBY) { | ||
| Map<String, Integer> poses = new HashMap<String, Integer>(); | ||
| RowResolver rwsch = new RowResolver(); | ||
| for (int i = 0; i < table.getPartCols().size(); i++) { | ||
| FieldSchema fs = table.getPartCols().get(i); | ||
| for (int i = 0; i < table.getEffectivePartCols().size(); i++) { | ||
| FieldSchema fs = table.getEffectivePartCols().get(i); | ||
| rwsch.put(table.getTableName(), fs.getName(), new ColumnInfo(fs.getName(), | ||
| TypeInfoFactory.getPrimitiveTypeInfo(fs.getType()), null, true)); | ||
| poses.put(fs.getName().toLowerCase(), i); | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -129,7 +129,7 @@ private Path getOriginalDir(Table table, PartSpecInfo partitionSpecInfo, List<Pa | |
| // in full partition specification case we allow custom locations to keep backward compatibility | ||
| if (partitions.isEmpty()) { | ||
| throw new HiveException("No partition matches the specification"); | ||
| } else if (partitionSpecInfo.values.size() != table.getPartCols().size()) { | ||
| } else if (partitionSpecInfo.values.size() != table.getEffectivePartCols().size()) { | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. do we need it here? |
||
| // for partial specifications we need partitions to follow the scheme | ||
| for (Partition partition : partitions) { | ||
| if (AlterTableArchiveUtils.partitionInCustomLocation(table, partition)) { | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -71,7 +71,7 @@ static void setOriginalLocation(Partition partition, String loc) { | |
| static boolean partitionInCustomLocation(Table table, Partition partition) throws HiveException { | ||
| String subdir = null; | ||
| try { | ||
| subdir = Warehouse.makePartName(table.getPartCols(), partition.getValues()); | ||
| subdir = Warehouse.makePartName(table.getEffectivePartCols(), partition.getValues()); | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. same, i don't even know what this archive is doing |
||
| } catch (MetaException e) { | ||
| throw new HiveException("Unable to get partition's directory", e); | ||
| } | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
@ramitg254 it's unnecessary here.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
yes, but getEffectivePartCols() will eventually be renamed to getPartCols() once we are able to drop the current getPartCols(), after getCols() has been updated as per: #6413 (comment)