From bf3ffb0335246c39180b07647b66353a66afa8e4 Mon Sep 17 00:00:00 2001
From: rtotaro
Date: Thu, 22 Apr 2021 08:26:58 +0200
Subject: [PATCH] #197 - In Progress: compiled with hive 3 dependencies

---
 pom.xml                                            |  55 ++-
 .../server/FederatedHMSHandler.java                | 249 +++++++++++-
 .../server/FederatedHMSHandlerFactory.java         |   4 +-
 .../server/FederatedHMSHandlerHive3.java           | 373 ++++++++++++++++++
 .../server/FederatedHMSHandlerTest.java            |  86 +---
 .../bdp/waggledance/WaggleDanceRunner.java         |  19 +-
 6 files changed, 676 insertions(+), 110 deletions(-)
 create mode 100644 waggle-dance-core/src/main/java/com/hotels/bdp/waggledance/server/FederatedHMSHandlerHive3.java

diff --git a/pom.xml b/pom.xml
index e8a533462..e102e65a8 100644
--- a/pom.xml
+++ b/pom.xml
@@ -37,13 +37,13 @@
     2.0.4.RELEASE
     3.1.0
     2.2
-    3.1.2
+    3.0.0
     4.13.1
     3.5.15
     3.1.5
     1.9
     1.8.9
-    4.0.0
+    5.0.0
     23.0
     4.0
     1.2.3
@@ -81,6 +81,38 @@
       <artifactId>javax.el</artifactId>
       <version>2.2.6</version>
     </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>${junit.version}</version>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.hamcrest</groupId>
+          <artifactId>hamcrest-core</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-core</artifactId>
+      <version>${mockito.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>net.bytebuddy</groupId>
+      <artifactId>byte-buddy</artifactId>
+      <version>1.10.15</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>net.bytebuddy</groupId>
+      <artifactId>byte-buddy-agent</artifactId>
+      <version>1.10.15</version>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.springframework.boot</groupId>
       <artifactId>spring-boot-dependencies</artifactId>
@@ -118,6 +150,12 @@
       <version>${hive.version}</version>
       <scope>compile</scope>
     </dependency>
+    <dependency>
+      <groupId>org.hamcrest</groupId>
+      <artifactId>hamcrest</artifactId>
+      <version>${hamcrest.version}</version>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.yaml</groupId>
       <artifactId>snakeyaml</artifactId>
@@ -126,7 +164,8 @@
     <dependency>
       <groupId>org.apache.derby</groupId>
       <artifactId>derby</artifactId>
-
+      <version>10.10.2.0</version>
       <scope>test</scope>
     </dependency>
@@ -153,6 +192,8 @@
         <version>${maven.release.plugin.version}</version>
       </plugin>
       <plugin>
+
+        <groupId>org.codehaus.mojo</groupId>
         <artifactId>aspectj-maven-plugin</artifactId>
         <version>${aspectj-maven-plugin.version}</version>
@@ -224,15 +265,7 @@
             <excludes>
-              <exclude>src/main/java/com/hotels/bdp/waggledance/api/validation/constraint/TunnelRoute.java</exclude>
-              <exclude>src/main/java/com/hotels/bdp/waggledance/api/validation/validator/TunnelRouteValidator.java</exclude>
-
-              <exclude>src/main/java/com/hotels/bdp/waggledance/client/ThriftMetastoreClient.java</exclude>
               <exclude>src/main/java/com/hotels/bdp/waggledance/server/MetaStoreProxyServer.java</exclude>
-              <exclude>src/main/java/com/hotels/bdp/waggledance/validation/Preconditions.java</exclude>
-              <exclude>src/main/java/com/hotels/bdp/waggledance/parse/ASTConverter.java</exclude>
-              <exclude>src/main/java/com/hotels/bdp/waggledance/parse/ASTNodeUtils.java</exclude>
-              <exclude>src/main/java/com/hotels/bdp/waggledance/parse/Rule.java</exclude>
             </excludes>
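Note on the test-dependency block above: junit:junit 4.x drags in hamcrest-core 1.3 transitively, which clashes with the standalone org.hamcrest:hamcrest 2.x pinned further down, hence the exclusion; the two byte-buddy artifacts are pinned to one release so mockito-core can generate proxies consistently on newer JDKs. A hypothetical smoke test (not part of this patch) that would break quickly if the stack were misaligned:

package com.hotels.bdp.waggledance;

import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.List;

import org.junit.Test;

public class DependencyAlignmentSmokeTest {

  @Test
  public void mockitoAndHamcrestCoexist() {
    // Exercises mockito-core together with the pinned byte-buddy pair.
    @SuppressWarnings("unchecked")
    List<String> mocked = mock(List.class);
    when(mocked.size()).thenReturn(3);
    // assertThat resolves to Hamcrest 2.x, not the hamcrest-core copy bundled with JUnit.
    assertThat(mocked.size(), is(3));
  }
}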
diff --git a/waggle-dance-core/src/main/java/com/hotels/bdp/waggledance/server/FederatedHMSHandler.java b/waggle-dance-core/src/main/java/com/hotels/bdp/waggledance/server/FederatedHMSHandler.java
index 0cd61e3ef..af67cb958 100644
--- a/waggle-dance-core/src/main/java/com/hotels/bdp/waggledance/server/FederatedHMSHandler.java
+++ b/waggle-dance-core/src/main/java/com/hotels/bdp/waggledance/server/FederatedHMSHandler.java
@@ -21,27 +21,44 @@
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
+import org.apache.hadoop.hive.metastore.RawStore;
+import org.apache.hadoop.hive.metastore.TransactionalMetaStoreEventListener;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.AbortTxnRequest;
 import org.apache.hadoop.hive.metastore.api.AbortTxnsRequest;
+import org.apache.hadoop.hive.metastore.api.AddCheckConstraintRequest;
+import org.apache.hadoop.hive.metastore.api.AddDefaultConstraintRequest;
 import org.apache.hadoop.hive.metastore.api.AddDynamicPartitions;
 import org.apache.hadoop.hive.metastore.api.AddForeignKeyRequest;
+import org.apache.hadoop.hive.metastore.api.AddNotNullConstraintRequest;
 import org.apache.hadoop.hive.metastore.api.AddPartitionsRequest;
 import org.apache.hadoop.hive.metastore.api.AddPartitionsResult;
 import org.apache.hadoop.hive.metastore.api.AddPrimaryKeyRequest;
+import org.apache.hadoop.hive.metastore.api.AddUniqueConstraintRequest;
 import org.apache.hadoop.hive.metastore.api.AggrStats;
+import org.apache.hadoop.hive.metastore.api.AllocateTableWriteIdsRequest;
+import org.apache.hadoop.hive.metastore.api.AllocateTableWriteIdsResponse;
 import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.CacheFileMetadataRequest;
 import org.apache.hadoop.hive.metastore.api.CacheFileMetadataResult;
+import org.apache.hadoop.hive.metastore.api.CheckConstraintsRequest;
+import org.apache.hadoop.hive.metastore.api.CheckConstraintsResponse;
 import org.apache.hadoop.hive.metastore.api.CheckLockRequest;
 import org.apache.hadoop.hive.metastore.api.ClearFileMetadataRequest;
 import org.apache.hadoop.hive.metastore.api.ClearFileMetadataResult;
+import org.apache.hadoop.hive.metastore.api.CmRecycleRequest;
+import org.apache.hadoop.hive.metastore.api.CmRecycleResponse;
 import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
 import org.apache.hadoop.hive.metastore.api.CommitTxnRequest;
 import org.apache.hadoop.hive.metastore.api.CompactionRequest;
 import org.apache.hadoop.hive.metastore.api.CompactionResponse;
 import org.apache.hadoop.hive.metastore.api.ConfigValSecurityException;
+import org.apache.hadoop.hive.metastore.api.CreationMetadata;
 import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
 import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.DefaultConstraintsRequest;
+import org.apache.hadoop.hive.metastore.api.DefaultConstraintsResponse;
 import org.apache.hadoop.hive.metastore.api.DropConstraintRequest;
 import org.apache.hadoop.hive.metastore.api.DropPartitionsRequest;
 import org.apache.hadoop.hive.metastore.api.DropPartitionsResult;
@@ -67,6 +84,8 @@
 import org.apache.hadoop.hive.metastore.api.GetTableResult;
 import org.apache.hadoop.hive.metastore.api.GetTablesRequest;
 import org.apache.hadoop.hive.metastore.api.GetTablesResult;
+import org.apache.hadoop.hive.metastore.api.GetValidWriteIdsRequest;
+import org.apache.hadoop.hive.metastore.api.GetValidWriteIdsResponse;
 import org.apache.hadoop.hive.metastore.api.GrantRevokePrivilegeRequest;
 import org.apache.hadoop.hive.metastore.api.GrantRevokePrivilegeResponse;
 import org.apache.hadoop.hive.metastore.api.GrantRevokeRoleRequest;
@@ -84,12 +103,17 @@
 import org.apache.hadoop.hive.metastore.api.LockComponent;
 import org.apache.hadoop.hive.metastore.api.LockRequest;
 import org.apache.hadoop.hive.metastore.api.LockResponse;
+import org.apache.hadoop.hive.metastore.api.Materialization;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchLockException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.NoSuchTxnException;
+import org.apache.hadoop.hive.metastore.api.NotNullConstraintsRequest;
+import org.apache.hadoop.hive.metastore.api.NotNullConstraintsResponse;
 import org.apache.hadoop.hive.metastore.api.NotificationEventRequest;
 import org.apache.hadoop.hive.metastore.api.NotificationEventResponse;
+import org.apache.hadoop.hive.metastore.api.NotificationEventsCountRequest;
+import org.apache.hadoop.hive.metastore.api.NotificationEventsCountResponse;
 import org.apache.hadoop.hive.metastore.api.OpenTxnRequest;
 import org.apache.hadoop.hive.metastore.api.OpenTxnsResponse;
 import org.apache.hadoop.hive.metastore.api.Partition;
@@ -108,9 +132,14 @@
 import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
 import org.apache.hadoop.hive.metastore.api.PutFileMetadataRequest;
 import org.apache.hadoop.hive.metastore.api.PutFileMetadataResult;
+import org.apache.hadoop.hive.metastore.api.ReplTblWriteIdStateRequest;
 import org.apache.hadoop.hive.metastore.api.Role;
+import org.apache.hadoop.hive.metastore.api.SQLCheckConstraint;
+import org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint;
 import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
+import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
 import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
+import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
 import org.apache.hadoop.hive.metastore.api.SetPartitionsStatsRequest;
 import org.apache.hadoop.hive.metastore.api.ShowCompactRequest;
 import org.apache.hadoop.hive.metastore.api.ShowCompactResponse;
@@ -124,10 +153,13 @@
 import org.apache.hadoop.hive.metastore.api.TxnAbortedException;
 import org.apache.hadoop.hive.metastore.api.TxnOpenException;
 import org.apache.hadoop.hive.metastore.api.Type;
+import org.apache.hadoop.hive.metastore.api.UniqueConstraintsRequest;
+import org.apache.hadoop.hive.metastore.api.UniqueConstraintsResponse;
 import org.apache.hadoop.hive.metastore.api.UnknownDBException;
 import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
 import org.apache.hadoop.hive.metastore.api.UnknownTableException;
 import org.apache.hadoop.hive.metastore.api.UnlockRequest;
+import org.apache.hadoop.hive.metastore.txn.TxnStore;
 import org.apache.thrift.TException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -147,14 +179,14 @@
 @Monitored
 @Component
 @Scope(ConfigurableBeanFactory.SCOPE_PROTOTYPE)
-class FederatedHMSHandler extends FacebookBase implements CloseableIHMSHandler {
+abstract class FederatedHMSHandler extends FacebookBase implements CloseableIHMSHandler {
 
   private static final Logger LOG = LoggerFactory.getLogger(FederatedHMSHandler.class);
   private static final String INVOCATION_LOG_NAME = "com.hotels.bdp.waggledance.server.invocation-log";
 
-  private final MappingEventListener databaseMappingService;
-  private final NotifyingFederationService notifyingFederationService;
-  private Configuration conf;
+  final MappingEventListener databaseMappingService;
+  final NotifyingFederationService notifyingFederationService;
+  Configuration conf;
 
   FederatedHMSHandler(
       MappingEventListener databaseMappingService,
@@ -351,13 +383,21 @@ public void drop_table_with_environment_context(
         environment_context);
   }
 
+  @Override
+  public void truncate_table(String s, String s1, List<String> list)
+      throws MetaException, TException
+  {
+
+  }
+
   @Override
   @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME)
   public List<String> get_tables(String db_name, String pattern) throws MetaException, TException {
     DatabaseMapping mapping = databaseMappingService.databaseMapping(db_name);
     List<String> resultTables = mapping.getClient().get_tables(mapping.transformInboundDatabaseName(db_name), pattern);
     resultTables = databaseMappingService.filterTables(db_name, resultTables, mapping);
-    return mapping.getMetastoreFilter().filterTableNames(db_name, resultTables);
+    //TODO: check null as catName
+    return mapping.getMetastoreFilter().filterTableNames(null, db_name, resultTables);
   }
 
@@ -368,7 +408,8 @@ public List<String> get_all_tables(String db_name) throws MetaException, TException {
     DatabaseMapping mapping = databaseMappingService.databaseMapping(db_name);
     List<String> resultTables = mapping.getClient().get_all_tables(mapping.transformInboundDatabaseName(db_name));
     resultTables = databaseMappingService.filterTables(db_name, resultTables, mapping);
-    return mapping.getMetastoreFilter().filterTableNames(db_name, resultTables);
+    //TODO: check null as catName
+    return mapping.getMetastoreFilter().filterTableNames(null, db_name, resultTables);
   }
 
   @Override
@@ -405,7 +446,8 @@ public List<String> get_table_names_by_filter(String dbname, String filter, short max_tables)
     List<String> resultTables = mapping.getClient()
         .get_table_names_by_filter(mapping.transformInboundDatabaseName(dbname), filter, max_tables);
     List<String> result = databaseMappingService.filterTables(dbname, resultTables, mapping);
-    return mapping.getMetastoreFilter().filterTableNames(dbname, result);
+    //TODO: check null as catName
+    return mapping.getMetastoreFilter().filterTableNames(null, dbname, result);
   }
 
   @Override
@@ -722,7 +764,8 @@ public List<String> get_partition_names(String db_name, String tbl_name, short max_parts)
     DatabaseMapping mapping = getDbMappingAndCheckTableAllowed(db_name, tbl_name);
     List<String> result = mapping.getClient()
         .get_partition_names(mapping.transformInboundDatabaseName(db_name), tbl_name, max_parts);
-    return mapping.getMetastoreFilter().filterPartitionNames(db_name, tbl_name, result);
+    //TODO: check null as catName
+    return mapping.getMetastoreFilter().filterPartitionNames(null, db_name, tbl_name, result);
   }
 
   @Override
@@ -762,7 +805,8 @@ public List<String> get_partition_names_ps(String db_name, String tbl_name, List<String> part_vals, short max_parts)
     List<String> result = mapping
         .getClient()
         .get_partition_names_ps(mapping.transformInboundDatabaseName(db_name), tbl_name, part_vals, max_parts);
-    return mapping.getMetastoreFilter().filterPartitionNames(db_name, tbl_name, result);
+    //TODO: check null as catName
+    return mapping.getMetastoreFilter().filterPartitionNames(null, db_name, tbl_name, result);
   }
 
   @Override
@@ -1187,6 +1231,13 @@ public GrantRevokePrivilegeResponse grant_revoke_privileges(GrantRevokePrivilegeRequest request)
     return getPrimaryClient().grant_revoke_privileges(request);
   }
 
+  @Override
+  public GrantRevokePrivilegeResponse refresh_privileges(HiveObjectRef hiveObjectRef, GrantRevokePrivilegeRequest grantRevokePrivilegeRequest)
+      throws MetaException, TException
+  {
+    return null;
+  }
+
   private DatabaseMapping checkWritePermissionsForPrivileges(PrivilegeBag privileges) throws NoSuchObjectException {
     DatabaseMapping mapping = databaseMappingService
         .databaseMapping(privileges.getPrivileges().get(0).getHiveObject().getDbName());
@@ -1256,6 +1307,27 @@ public void commit_txn(CommitTxnRequest rqst) throws NoSuchTxnException, TxnAbortedException, TException {
     getPrimaryClient().commit_txn(rqst);
   }
 
+  @Override
+  public void repl_tbl_writeid_state(ReplTblWriteIdStateRequest replTblWriteIdStateRequest)
+      throws TException
+  {
+
+  }
+
+  @Override
+  public GetValidWriteIdsResponse get_valid_write_ids(GetValidWriteIdsRequest getValidWriteIdsRequest)
+      throws NoSuchTxnException, MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public AllocateTableWriteIdsResponse allocate_table_write_ids(AllocateTableWriteIdsRequest allocateTableWriteIdsRequest)
+      throws NoSuchTxnException, TxnAbortedException, MetaException, TException
+  {
+    return null;
+  }
+
   @Override
   @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME)
   public LockResponse lock(LockRequest rqst) throws NoSuchTxnException, TxnAbortedException, TException {
@@ -1346,6 +1418,64 @@ public void setConf(Configuration conf) {
 
   @Override
   public void init() throws MetaException {}
 
+  @Override
+  public int getThreadId()
+  {
+    return 0;
+  }
+
+  @Override
+  public RawStore getMS()
+      throws MetaException
+  {
+    return null;
+  }
+
+  @Override
+  public TxnStore getTxnHandler()
+  {
+    return null;
+  }
+
+  @Override
+  public Warehouse getWh()
+  {
+    return null;
+  }
+
+  @Override
+  public Database get_database_core(String catName, String name)
+      throws NoSuchObjectException, MetaException
+  {
+    return null;
+  }
+
+  @Override
+  public Table get_table_core(String catName, String dbname, String name)
+      throws MetaException, NoSuchObjectException
+  {
+    return null;
+  }
+
+  @Override
+  public Table get_table_core(String dbName, String name)
+      throws MetaException, NoSuchObjectException
+  {
+    return null;
+  }
+
+  @Override
+  public List<TransactionalMetaStoreEventListener> getTransactionalListeners()
+  {
+    return null;
+  }
+
+  @Override
+  public List<MetaStoreEventListener> getListeners()
+  {
+    return null;
+  }
+
   // Hive 2.1.0 methods
   @Override
   @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME)
@@ -1367,6 +1497,34 @@ public void add_foreign_key(AddForeignKeyRequest req) throws NoSuchObjectException, MetaException, TException {
     getPrimaryClient().add_foreign_key(req);
   }
 
+  @Override
+  public void add_unique_constraint(AddUniqueConstraintRequest addUniqueConstraintRequest)
+      throws NoSuchObjectException, MetaException, TException
+  {
+
+  }
+
+  @Override
+  public void add_not_null_constraint(AddNotNullConstraintRequest addNotNullConstraintRequest)
+      throws NoSuchObjectException, MetaException, TException
+  {
+
+  }
+
+  @Override
+  public void add_default_constraint(AddDefaultConstraintRequest addDefaultConstraintRequest)
+      throws NoSuchObjectException, MetaException, TException
+  {
+
+  }
+
+  @Override
+  public void add_check_constraint(AddCheckConstraintRequest addCheckConstraintRequest)
+      throws NoSuchObjectException, MetaException, TException
+  {
+
+  }
+
   @Override
   @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME)
   public int add_master_key(String key) throws MetaException, TException {
@@ -1426,10 +1584,11 @@ public ClearFileMetadataResult clear_file_metadata(ClearFileMetadataRequest req)
 
   @Override
   @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME)
-  public void create_table_with_constraints(Table tbl, List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys)
-      throws AlreadyExistsException, InvalidObjectException, MetaException, NoSuchObjectException, TException {
+  public void create_table_with_constraints(Table tbl, List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys, List<SQLUniqueConstraint> uniqueConstraints, List<SQLNotNullConstraint> notNullConstraints, List<SQLDefaultConstraint> defaultConstraints, List<SQLCheckConstraint> checkConstraints)
+      throws TException
+  {
     DatabaseMapping mapping = checkWritePermissions(tbl.getDbName());
-    mapping.getClient().create_table_with_constraints(mapping.transformInboundTable(tbl), primaryKeys, foreignKeys);
+    mapping.getClient().create_table_with_constraints(mapping.transformInboundTable(tbl), primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints);
   }
 
   @Override
@@ -1470,6 +1629,13 @@ public void flushCache() throws TException {
     getPrimaryClient().flushCache();
   }
 
+  @Override
+  public CmRecycleResponse cm_recycle(CmRecycleRequest cmRecycleRequest)
+      throws MetaException, TException
+  {
+    return null;
+  }
+
   @Override
   @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME)
   public GetAllFunctionsResponse get_all_functions() throws TException {
@@ -1490,6 +1656,13 @@ public CurrentNotificationEventId get_current_notificationEventId() throws TException {
     return getPrimaryClient().get_current_notificationEventId();
   }
 
+  @Override
+  public NotificationEventsCountResponse get_notification_events_count(NotificationEventsCountRequest notificationEventsCountRequest)
+      throws TException
+  {
+    return null;
+  }
+
   @Override
   @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME)
   public List<FieldSchema> get_fields_with_environment_context(
@@ -1526,6 +1699,34 @@ public ForeignKeysResponse get_foreign_keys(ForeignKeysRequest request)
         mapping.getClient().get_foreign_keys(mapping.transformInboundForeignKeysRequest(request)));
   }
 
+  @Override
+  public UniqueConstraintsResponse get_unique_constraints(UniqueConstraintsRequest uniqueConstraintsRequest)
+      throws MetaException, NoSuchObjectException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public NotNullConstraintsResponse get_not_null_constraints(NotNullConstraintsRequest notNullConstraintsRequest)
+      throws MetaException, NoSuchObjectException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public DefaultConstraintsResponse get_default_constraints(DefaultConstraintsRequest defaultConstraintsRequest)
+      throws MetaException, NoSuchObjectException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public CheckConstraintsResponse get_check_constraints(CheckConstraintsRequest checkConstraintsRequest)
+      throws MetaException, NoSuchObjectException, TException
+  {
+    return null;
+  }
+
   @Override
   @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME)
   public List<String> get_master_keys() throws TException {
@@ -1618,7 +1819,15 @@ public List<String> get_tables_by_type(String db_name, String pattern, String tableType)
     DatabaseMapping mapping = databaseMappingService.databaseMapping(db_name);
     List<String> resultTables = mapping.getClient().get_tables_by_type(mapping.transformInboundDatabaseName(db_name), pattern, tableType);
     List<String> result = databaseMappingService.filterTables(db_name, resultTables, mapping);
-    return mapping.getMetastoreFilter().filterTableNames(db_name, result);
+    //TODO: check null as catName
+    return mapping.getMetastoreFilter().filterTableNames(null, db_name, result);
+  }
+
+  @Override
+  public List<String> get_materialized_views_for_rewriting(String s)
+      throws MetaException, TException
+  {
+    return null;
   }
 
   @Override
@@ -1644,6 +1853,20 @@ public GetTablesResult get_table_objects_by_name_req(GetTablesRequest req)
     return mapping.transformOutboundGetTablesResult(result);
   }
 
+  @Override
+  public Map<String, Materialization> get_materialization_invalidation_info(String s, List<String> list)
+      throws MetaException, InvalidOperationException, UnknownDBException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public void update_creation_metadata(String s, String s1, String s2, CreationMetadata creationMetadata)
+      throws MetaException, InvalidOperationException, UnknownDBException, TException
+  {
+
+  }
+
   @Override
   @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME)
   public CompactionResponse compact2(CompactionRequest rqst) throws TException {
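Two loose ends in FederatedHMSHandler above are worth flagging. First, every filterTableNames/filterPartitionNames call now passes null for Hive 3's new catName parameter (hence the TODOs); Hive 3's Warehouse.DEFAULT_CATALOG_NAME ("hive") is likely the value these calls want once catalog handling is settled, though that needs verifying against the filter-hook implementations. Second, the newly required thrift endpoints are empty stubs for now. Where an endpoint has an obvious federation story, it could reuse the existing delegation pattern; for example, a sketch (not part of this patch) of truncate_table following the same shape as the write operations around it:

  @Override
  @Loggable(value = Loggable.DEBUG, skipResult = true, name = INVOCATION_LOG_NAME)
  public void truncate_table(String db_name, String tbl_name, List<String> part_names)
      throws MetaException, TException {
    // Enforce write access, map the inbound database name, then delegate
    // to the federated metastore's client, as drop_table does.
    DatabaseMapping mapping = checkWritePermissions(db_name);
    mapping.getClient().truncate_table(mapping.transformInboundDatabaseName(db_name), tbl_name, part_names);
  }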
diff --git a/waggle-dance-core/src/main/java/com/hotels/bdp/waggledance/server/FederatedHMSHandlerFactory.java b/waggle-dance-core/src/main/java/com/hotels/bdp/waggledance/server/FederatedHMSHandlerFactory.java
index 653959219..0c892691b 100644
--- a/waggle-dance-core/src/main/java/com/hotels/bdp/waggledance/server/FederatedHMSHandlerFactory.java
+++ b/waggle-dance-core/src/main/java/com/hotels/bdp/waggledance/server/FederatedHMSHandlerFactory.java
@@ -1,5 +1,5 @@
 /**
- * Copyright (C) 2016-2020 Expedia, Inc.
+ * Copyright (C) 2016-2021 Expedia, Inc.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -57,7 +57,7 @@ public CloseableIHMSHandler create() {
     MappingEventListener service = createDatabaseMappingService();
     MonitoredDatabaseMappingService monitoredService = new MonitoredDatabaseMappingService(service);
 
-    CloseableIHMSHandler baseHandler = new FederatedHMSHandler(monitoredService, notifyingFederationService);
+    CloseableIHMSHandler baseHandler = new FederatedHMSHandlerHive3(monitoredService, notifyingFederationService);
     HiveConf conf = new HiveConf(hiveConf);
     baseHandler.setConf(conf);
     return baseHandler;
diff --git a/waggle-dance-core/src/main/java/com/hotels/bdp/waggledance/server/FederatedHMSHandlerHive3.java b/waggle-dance-core/src/main/java/com/hotels/bdp/waggledance/server/FederatedHMSHandlerHive3.java
new file mode 100644
index 000000000..87cfeb0d7
--- /dev/null
+++ b/waggle-dance-core/src/main/java/com/hotels/bdp/waggledance/server/FederatedHMSHandlerHive3.java
@@ -0,0 +1,373 @@
+/**
+ * Copyright (C) 2016-2021 Expedia, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.hotels.bdp.waggledance.server;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
+import org.apache.hadoop.hive.metastore.api.AlterISchemaRequest;
+import org.apache.hadoop.hive.metastore.api.CreateCatalogRequest;
+import org.apache.hadoop.hive.metastore.api.DropCatalogRequest;
+import org.apache.hadoop.hive.metastore.api.FindSchemasByColsResp;
+import org.apache.hadoop.hive.metastore.api.FindSchemasByColsRqst;
+import org.apache.hadoop.hive.metastore.api.GetCatalogRequest;
+import org.apache.hadoop.hive.metastore.api.GetCatalogResponse;
+import org.apache.hadoop.hive.metastore.api.GetCatalogsResponse;
+import org.apache.hadoop.hive.metastore.api.GetRuntimeStatsRequest;
+import org.apache.hadoop.hive.metastore.api.GetSerdeRequest;
+import org.apache.hadoop.hive.metastore.api.ISchema;
+import org.apache.hadoop.hive.metastore.api.ISchemaName;
+import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
+import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
+import org.apache.hadoop.hive.metastore.api.LockResponse;
+import org.apache.hadoop.hive.metastore.api.MapSchemaVersionToSerdeRequest;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.RuntimeStat;
+import org.apache.hadoop.hive.metastore.api.SchemaVersion;
+import org.apache.hadoop.hive.metastore.api.SchemaVersionDescriptor;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.SetSchemaVersionStateRequest;
+import org.apache.hadoop.hive.metastore.api.WMAlterPoolRequest;
+import org.apache.hadoop.hive.metastore.api.WMAlterPoolResponse;
+import org.apache.hadoop.hive.metastore.api.WMAlterResourcePlanRequest;
+import org.apache.hadoop.hive.metastore.api.WMAlterResourcePlanResponse;
+import org.apache.hadoop.hive.metastore.api.WMAlterTriggerRequest;
+import org.apache.hadoop.hive.metastore.api.WMAlterTriggerResponse;
+import org.apache.hadoop.hive.metastore.api.WMCreateOrDropTriggerToPoolMappingRequest;
+import org.apache.hadoop.hive.metastore.api.WMCreateOrDropTriggerToPoolMappingResponse;
+import org.apache.hadoop.hive.metastore.api.WMCreateOrUpdateMappingRequest;
+import org.apache.hadoop.hive.metastore.api.WMCreateOrUpdateMappingResponse;
+import org.apache.hadoop.hive.metastore.api.WMCreatePoolRequest;
+import org.apache.hadoop.hive.metastore.api.WMCreatePoolResponse;
+import org.apache.hadoop.hive.metastore.api.WMCreateResourcePlanRequest;
+import org.apache.hadoop.hive.metastore.api.WMCreateResourcePlanResponse;
+import org.apache.hadoop.hive.metastore.api.WMCreateTriggerRequest;
+import org.apache.hadoop.hive.metastore.api.WMCreateTriggerResponse;
+import org.apache.hadoop.hive.metastore.api.WMDropMappingRequest;
+import org.apache.hadoop.hive.metastore.api.WMDropMappingResponse;
+import org.apache.hadoop.hive.metastore.api.WMDropPoolRequest;
+import org.apache.hadoop.hive.metastore.api.WMDropPoolResponse;
+import org.apache.hadoop.hive.metastore.api.WMDropResourcePlanRequest;
+import org.apache.hadoop.hive.metastore.api.WMDropResourcePlanResponse;
+import org.apache.hadoop.hive.metastore.api.WMDropTriggerRequest;
+import org.apache.hadoop.hive.metastore.api.WMDropTriggerResponse;
+import org.apache.hadoop.hive.metastore.api.WMGetActiveResourcePlanRequest;
+import org.apache.hadoop.hive.metastore.api.WMGetActiveResourcePlanResponse;
+import org.apache.hadoop.hive.metastore.api.WMGetAllResourcePlanRequest;
+import org.apache.hadoop.hive.metastore.api.WMGetAllResourcePlanResponse;
+import org.apache.hadoop.hive.metastore.api.WMGetResourcePlanRequest;
+import org.apache.hadoop.hive.metastore.api.WMGetResourcePlanResponse;
+import org.apache.hadoop.hive.metastore.api.WMGetTriggersForResourePlanRequest;
+import org.apache.hadoop.hive.metastore.api.WMGetTriggersForResourePlanResponse;
+import org.apache.hadoop.hive.metastore.api.WMValidateResourcePlanRequest;
+import org.apache.hadoop.hive.metastore.api.WMValidateResourcePlanResponse;
+import org.apache.thrift.TException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.hotels.bdp.waggledance.mapping.service.MappingEventListener;
+import com.hotels.bdp.waggledance.mapping.service.impl.NotifyingFederationService;
+
+public class FederatedHMSHandlerHive3 extends FederatedHMSHandler
+{
+  private static final Logger LOG = LoggerFactory.getLogger(FederatedHMSHandlerHive3.class);
+
+  FederatedHMSHandlerHive3(MappingEventListener databaseMappingService, NotifyingFederationService notifyingFederationService)
+  {
+    super(databaseMappingService, notifyingFederationService);
+  }
+
+  @Override
+  public void create_catalog(CreateCatalogRequest createCatalogRequest)
+      throws AlreadyExistsException, InvalidObjectException, MetaException, TException
+  {
+
+  }
+
+  @Override
+  public GetCatalogResponse get_catalog(GetCatalogRequest getCatalogRequest)
+      throws NoSuchObjectException, MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public GetCatalogsResponse get_catalogs()
+      throws MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public void drop_catalog(DropCatalogRequest dropCatalogRequest)
+      throws NoSuchObjectException, InvalidOperationException, MetaException, TException
+  {
+
+  }
+
+  @Override
+  public String get_metastore_db_uuid()
+      throws MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public WMCreateResourcePlanResponse create_resource_plan(WMCreateResourcePlanRequest wmCreateResourcePlanRequest)
+      throws AlreadyExistsException, InvalidObjectException, MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public WMGetResourcePlanResponse get_resource_plan(WMGetResourcePlanRequest wmGetResourcePlanRequest)
+      throws NoSuchObjectException, MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public WMGetActiveResourcePlanResponse get_active_resource_plan(WMGetActiveResourcePlanRequest wmGetActiveResourcePlanRequest)
+      throws MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public WMGetAllResourcePlanResponse get_all_resource_plans(WMGetAllResourcePlanRequest wmGetAllResourcePlanRequest)
+      throws MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public WMAlterResourcePlanResponse alter_resource_plan(WMAlterResourcePlanRequest wmAlterResourcePlanRequest)
+      throws NoSuchObjectException, InvalidOperationException, MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public WMValidateResourcePlanResponse validate_resource_plan(WMValidateResourcePlanRequest wmValidateResourcePlanRequest)
+      throws NoSuchObjectException, MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public WMDropResourcePlanResponse drop_resource_plan(WMDropResourcePlanRequest wmDropResourcePlanRequest)
+      throws NoSuchObjectException, InvalidOperationException, MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public WMCreateTriggerResponse create_wm_trigger(WMCreateTriggerRequest wmCreateTriggerRequest)
+      throws AlreadyExistsException, NoSuchObjectException, InvalidObjectException, MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public WMAlterTriggerResponse alter_wm_trigger(WMAlterTriggerRequest wmAlterTriggerRequest)
+      throws NoSuchObjectException, InvalidObjectException, MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public WMDropTriggerResponse drop_wm_trigger(WMDropTriggerRequest wmDropTriggerRequest)
+      throws NoSuchObjectException, InvalidOperationException, MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public WMGetTriggersForResourePlanResponse get_triggers_for_resourceplan(WMGetTriggersForResourePlanRequest wmGetTriggersForResourePlanRequest)
+      throws NoSuchObjectException, MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public WMCreatePoolResponse create_wm_pool(WMCreatePoolRequest wmCreatePoolRequest)
+      throws AlreadyExistsException, NoSuchObjectException, InvalidObjectException, MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public WMAlterPoolResponse alter_wm_pool(WMAlterPoolRequest wmAlterPoolRequest)
+      throws AlreadyExistsException, NoSuchObjectException, InvalidObjectException, MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public WMDropPoolResponse drop_wm_pool(WMDropPoolRequest wmDropPoolRequest)
+      throws NoSuchObjectException, InvalidOperationException, MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public WMCreateOrUpdateMappingResponse create_or_update_wm_mapping(WMCreateOrUpdateMappingRequest wmCreateOrUpdateMappingRequest)
+      throws AlreadyExistsException, NoSuchObjectException, InvalidObjectException, MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public WMDropMappingResponse drop_wm_mapping(WMDropMappingRequest wmDropMappingRequest)
+      throws NoSuchObjectException, InvalidOperationException, MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public WMCreateOrDropTriggerToPoolMappingResponse create_or_drop_wm_trigger_to_pool_mapping(WMCreateOrDropTriggerToPoolMappingRequest wmCreateOrDropTriggerToPoolMappingRequest)
+      throws AlreadyExistsException, NoSuchObjectException, InvalidObjectException, MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public void create_ischema(ISchema iSchema)
+      throws AlreadyExistsException, NoSuchObjectException, MetaException, TException
+  {
+
+  }
+
+  @Override
+  public void alter_ischema(AlterISchemaRequest alterISchemaRequest)
+      throws NoSuchObjectException, MetaException, TException
+  {
+
+  }
+
+  @Override
+  public ISchema get_ischema(ISchemaName iSchemaName)
+      throws NoSuchObjectException, MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public void drop_ischema(ISchemaName iSchemaName)
+      throws NoSuchObjectException, InvalidOperationException, MetaException, TException
+  {
+
+  }
+
+  @Override
+  public void add_schema_version(SchemaVersion schemaVersion)
+      throws AlreadyExistsException, NoSuchObjectException, MetaException, TException
+  {
+
+  }
+
+  @Override
+  public SchemaVersion get_schema_version(SchemaVersionDescriptor schemaVersionDescriptor)
+      throws NoSuchObjectException, MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public SchemaVersion get_schema_latest_version(ISchemaName iSchemaName)
+      throws NoSuchObjectException, MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public List<SchemaVersion> get_schema_all_versions(ISchemaName iSchemaName)
+      throws NoSuchObjectException, MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public void drop_schema_version(SchemaVersionDescriptor schemaVersionDescriptor)
+      throws NoSuchObjectException, MetaException, TException
+  {
+
+  }
+
+  @Override
+  public FindSchemasByColsResp get_schemas_by_cols(FindSchemasByColsRqst findSchemasByColsRqst)
+      throws MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public void map_schema_version_to_serde(MapSchemaVersionToSerdeRequest mapSchemaVersionToSerdeRequest)
+      throws NoSuchObjectException, MetaException, TException
+  {
+
+  }
+
+  @Override
+  public void set_schema_version_state(SetSchemaVersionStateRequest setSchemaVersionStateRequest)
+      throws NoSuchObjectException, InvalidOperationException, MetaException, TException
+  {
+
+  }
+
+  @Override
+  public void add_serde(SerDeInfo serDeInfo)
+      throws AlreadyExistsException, MetaException, TException
+  {
+
+  }
+
+  @Override
+  public SerDeInfo get_serde(GetSerdeRequest getSerdeRequest)
+      throws NoSuchObjectException, MetaException, TException
+  {
+    return null;
+  }
+
+  @Override
+  public LockResponse get_lock_materialization_rebuild(String s, String s1, long l)
+      throws TException
+  {
+    return null;
+  }
+
+  @Override
+  public boolean heartbeat_lock_materialization_rebuild(String s, String s1, long l)
+      throws TException
+  {
+    return false;
+  }
+
+  @Override
+  public void add_runtime_stats(RuntimeStat runtimeStat)
+      throws MetaException, TException
+  {
+
+  }
+
+  @Override
+  public List<RuntimeStat> get_runtime_stats(GetRuntimeStatsRequest getRuntimeStatsRequest)
+      throws MetaException, TException
+  {
+    return null;
+  }
+}
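FederatedHMSHandlerHive3 stubs out everything Hive 3 added to the handler interface — catalogs, workload-management (WM) resource plans, and the schema-registry calls — returning null until each gets a real federation story. For read-only calls with no mapping semantics, delegating to the primary metastore may be the simplest interim behaviour. A sketch (not part of this patch, and assuming getPrimaryClient() in the base class is made accessible to this subclass):

  @Override
  public GetCatalogsResponse get_catalogs() throws MetaException, TException {
    // No catalog mapping exists in Waggle Dance yet, so report whatever the
    // primary metastore has rather than returning null to thrift clients.
    return getPrimaryClient().get_catalogs();
  }

Returning null from these stubs will surface as opaque thrift errors on the client side, so even an interim implementation that throws a descriptive MetaException would be easier to debug.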
diff --git a/waggle-dance-core/src/test/java/com/hotels/bdp/waggledance/server/FederatedHMSHandlerTest.java b/waggle-dance-core/src/test/java/com/hotels/bdp/waggledance/server/FederatedHMSHandlerTest.java
index 6178d6889..bc4d29fed 100644
--- a/waggle-dance-core/src/test/java/com/hotels/bdp/waggledance/server/FederatedHMSHandlerTest.java
+++ b/waggle-dance-core/src/test/java/com/hotels/bdp/waggledance/server/FederatedHMSHandlerTest.java
@@ -87,7 +87,6 @@
 import org.apache.hadoop.hive.metastore.api.HeartbeatTxnRangeResponse;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
 import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
-import org.apache.hadoop.hive.metastore.api.Index;
 import org.apache.hadoop.hive.metastore.api.LockComponent;
 import org.apache.hadoop.hive.metastore.api.LockLevel;
 import org.apache.hadoop.hive.metastore.api.LockRequest;
@@ -114,8 +113,12 @@
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
 import org.apache.hadoop.hive.metastore.api.Role;
+import org.apache.hadoop.hive.metastore.api.SQLCheckConstraint;
+import org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint;
 import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
+import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
 import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
+import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
 import org.apache.hadoop.hive.metastore.api.SetPartitionsStatsRequest;
 import org.apache.hadoop.hive.metastore.api.ShowCompactRequest;
 import org.apache.hadoop.hive.metastore.api.ShowCompactResponse;
@@ -158,7 +161,7 @@ public class FederatedHMSHandlerTest {
 
   @Before
   public void setUp() throws NoSuchObjectException {
-    handler = new FederatedHMSHandler(databaseMappingService, notifyingFederationService);
+    handler = new FederatedHMSHandlerHive3(databaseMappingService, notifyingFederationService);
     when(databaseMappingService.primaryDatabaseMapping()).thenReturn(primaryMapping);
     when(databaseMappingService.getDatabaseMappings()).thenReturn(Collections.singletonList(primaryMapping));
     when(primaryMapping.getClient()).thenReturn(primaryClient);
@@ -1057,75 +1060,6 @@ public void isPartitionMarkedForEvent() throws TException {
     assertThat(result, is(true));
   }
 
-  @Test
-  public void add_index() throws TException {
-    Index newIndex = new Index();
-    newIndex.setDbName(DB_P);
-    Index inboundIndex = new Index();
-    Index outboundIndex = new Index();
-    Table newTable = new Table();
-    newTable.setDbName(DB_P);
-    Table inboundTable = new Table();
-
-    when(primaryMapping.transformInboundIndex(newIndex)).thenReturn(inboundIndex);
-    when(primaryMapping.transformInboundTable(newTable)).thenReturn(inboundTable);
-    when(primaryMapping.transformOutboundIndex(outboundIndex)).thenReturn(newIndex);
-    when(primaryClient.add_index(inboundIndex, inboundTable)).thenReturn(outboundIndex);
-
-    Index result = handler.add_index(newIndex, newTable);
-    verify(primaryMapping, times(2)).checkWritePermissions(DB_P);
-    assertThat(result, is(newIndex));
-  }
-
-  @Test
-  public void alter_index() throws TException {
-    Index newIndex = new Index();
-    newIndex.setDbName(DB_P);
-    Index inboundIndex = new Index();
-    when(primaryMapping.transformInboundIndex(newIndex)).thenReturn(inboundIndex);
-
-    handler.alter_index(DB_P, "table", "index", newIndex);
-    verify(primaryMapping, times(2)).checkWritePermissions(DB_P);
-    verify(primaryClient).alter_index(DB_P, "table", "index", inboundIndex);
-  }
-
-  @Test
-  public void drop_index_by_name() throws TException {
-    when(primaryClient.drop_index_by_name(DB_P, "table", "index", true)).thenReturn(true);
-    boolean result = handler.drop_index_by_name(DB_P, "table", "index", true);
-    verify(primaryMapping).checkWritePermissions(DB_P);
-    assertThat(result, is(true));
-  }
-
-  @Test
-  public void get_index_by_name() throws TException {
-    Index index = new Index();
-    Index outboundIndex = new Index();
-    when(primaryClient.get_index_by_name(DB_P, "table", "index")).thenReturn(index);
-    when(primaryMapping.transformOutboundIndex(index)).thenReturn(outboundIndex);
-    Index result = handler.get_index_by_name(DB_P, "table", "index");
-    assertThat(result, is(outboundIndex));
-  }
-
-  @Test
-  public void get_indexes() throws TException {
-    List<Index> indexList = Collections.singletonList(new Index());
-    List<Index> outboundIndexList = Collections.singletonList(new Index());
-    when(primaryMapping.transformOutboundIndexes(indexList)).thenReturn(outboundIndexList);
-    when(primaryClient.get_indexes(DB_P, "table", (short) 2)).thenReturn(indexList);
-
-    List<Index> result = handler.get_indexes(DB_P, "table", (short) 2);
-    assertThat(result, is(outboundIndexList));
-  }
-
-  @Test
-  public void get_index_names() throws TException {
-    List<String> indexNames = Arrays.asList("name1", "name2");
-    when(primaryClient.get_index_names(DB_P, "table", (short) 2)).thenReturn(indexNames);
-    List<String> result = handler.get_index_names(DB_P, "table", (short) 2);
-    assertThat(result, is(indexNames));
-  }
-
   @Test
   public void update_table_column_statistics() throws TException {
     ColumnStatisticsDesc columnStatisticsDesc = new ColumnStatisticsDesc(true, DB_P, "table");
@@ -1578,7 +1512,7 @@ public void abort_txns() throws TException {
 
   @Test
   public void add_dynamic_partitions() throws TException {
-    AddDynamicPartitions request = new AddDynamicPartitions(1, DB_P, "table", Collections.emptyList());
+    AddDynamicPartitions request = new AddDynamicPartitions(1, 1, DB_P, "table", Collections.emptyList());
     AddDynamicPartitions inboundRequest = new AddDynamicPartitions();
     when(primaryMapping.transformInboundAddDynamicPartitions(request)).thenReturn(inboundRequest);
     handler.add_dynamic_partitions(request);
@@ -1659,10 +1593,14 @@ public void create_table_with_constraints() throws TException {
     Table inboundTable = new Table();
     List<SQLPrimaryKey> primaryKeys = Collections.emptyList();
     List<SQLForeignKey> foreignKeys = Collections.emptyList();
+    List<SQLUniqueConstraint> uniqueConstraints = Collections.emptyList();
+    List<SQLNotNullConstraint> notNullConstraints = Collections.emptyList();
+    List<SQLDefaultConstraint> defaultConstraints = Collections.emptyList();
+    List<SQLCheckConstraint> checkConstraints = Collections.emptyList();
     when(primaryMapping.transformInboundTable(table)).thenReturn(inboundTable);
-    handler.create_table_with_constraints(table, primaryKeys, foreignKeys);
+    handler.create_table_with_constraints(table, primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints);
     verify(primaryMapping).checkWritePermissions(DB_P);
-    verify(primaryClient).create_table_with_constraints(inboundTable, primaryKeys, foreignKeys);
+    verify(primaryClient).create_table_with_constraints(inboundTable, primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints);
   }
 
   @Test
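The index tests above disappear because Hive 3 removed the index API altogether (HIVE-18448), and the remaining changes track the new constructor and method arities. If the truncate_table delegation sketched earlier were adopted, a matching test could follow the same mock pattern as the surrounding ones (hypothetical; assumes setUp() keeps mapping DB_P to itself, as the existing tests do):

  @Test
  public void truncate_table() throws TException {
    List<String> partNames = Collections.emptyList();
    handler.truncate_table(DB_P, "table", partNames);
    verify(primaryMapping).checkWritePermissions(DB_P);
    verify(primaryClient).truncate_table(DB_P, "table", partNames);
  }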
diff --git a/waggle-dance-integration-tests/src/main/java/com/hotels/bdp/waggledance/WaggleDanceRunner.java b/waggle-dance-integration-tests/src/main/java/com/hotels/bdp/waggledance/WaggleDanceRunner.java
index 910059a64..8cc769dbb 100644
--- a/waggle-dance-integration-tests/src/main/java/com/hotels/bdp/waggledance/WaggleDanceRunner.java
+++ b/waggle-dance-integration-tests/src/main/java/com/hotels/bdp/waggledance/WaggleDanceRunner.java
@@ -15,8 +15,6 @@
  */
 package com.hotels.bdp.waggledance;
 
-import static org.apache.directory.api.util.Strings.isNotEmpty;
-
 import static com.google.common.base.Preconditions.checkArgument;
 
 import java.io.File;
@@ -38,6 +36,7 @@
 import org.apache.commons.vfs2.FileSystemException;
 import org.apache.commons.vfs2.FileSystemManager;
 import org.apache.commons.vfs2.VFS;
+import org.apache.logging.log4j.util.Strings;
 import org.springframework.context.ApplicationContext;
 import org.yaml.snakeyaml.Yaml;
 
@@ -118,8 +117,8 @@ public Builder configurationProperty(String key, String value) {
   }
 
   public Builder federate(String name, String remoteMetaStoreUris, String... mappableDatabases) {
-    checkArgument(isNotEmpty(name));
-    checkArgument(isNotEmpty(remoteMetaStoreUris));
+    checkArgument(Strings.isNotEmpty(name));
+    checkArgument(Strings.isNotEmpty(remoteMetaStoreUris));
     FederatedMetaStore federatedMetaStore = new FederatedMetaStore(name, remoteMetaStoreUris);
     federatedMetaStore.setMappedDatabases(Arrays.asList(mappableDatabases));
     federatedMetaStore.setLatency(8000L);
@@ -146,8 +145,8 @@ public Builder withPrimaryDatabaseNameMappingMap(Map databaseNam
   }
 
   public Builder federate(String name, String remoteMetaStoreUris, List mappedTables, String... mappableDatabases) {
-    checkArgument(isNotEmpty(name));
-    checkArgument(isNotEmpty(remoteMetaStoreUris));
+    checkArgument(Strings.isNotEmpty(name));
+    checkArgument(Strings.isNotEmpty(remoteMetaStoreUris));
     FederatedMetaStore federatedMetaStore = new FederatedMetaStore(name, remoteMetaStoreUris);
     federatedMetaStore.setMappedDatabases(Arrays.asList(mappableDatabases));
     federatedMetaStore.setMappedTables(mappedTables);
@@ -162,8 +161,8 @@ public Builder federate(
       AccessControlType accessControlType,
       String[] mappableDatabases,
       String[] writeableDatabaseWhiteList) {
-    checkArgument(isNotEmpty(name));
-    checkArgument(isNotEmpty(remoteMetaStoreUris));
+    checkArgument(Strings.isNotEmpty(name));
+    checkArgument(Strings.isNotEmpty(remoteMetaStoreUris));
     FederatedMetaStore federatedMetaStore = new FederatedMetaStore(name, remoteMetaStoreUris, accessControlType);
     federatedMetaStore.setMappedDatabases(Arrays.asList(mappableDatabases));
     federatedMetaStore.setWritableDatabaseWhiteList(Arrays.asList(writeableDatabaseWhiteList));
@@ -197,8 +196,8 @@ public Builder primary(
       String remoteMetaStoreUris,
       AccessControlType accessControlType,
       String... writableDatabaseWhiteList) {
-    checkArgument(isNotEmpty(name));
-    checkArgument(isNotEmpty(remoteMetaStoreUris));
+    checkArgument(Strings.isNotEmpty(name));
+    checkArgument(Strings.isNotEmpty(remoteMetaStoreUris));
     primaryMetaStore = new PrimaryMetaStore(name, remoteMetaStoreUris, accessControlType, writableDatabaseWhiteList);
     primaryMetaStore.setLatency(8000L);
     return this;