public interface HiveShim extends Serializable
Modifier and Type | Method and Description |
---|---|
void |
alterPartition(org.apache.hadoop.hive.metastore.IMetaStoreClient client,
String databaseName,
String tableName,
org.apache.hadoop.hive.metastore.api.Partition partition) |
void |
alterTable(org.apache.hadoop.hive.metastore.IMetaStoreClient client,
String databaseName,
String tableName,
org.apache.hadoop.hive.metastore.api.Table table)
Alters a Hive table.
|
org.apache.hadoop.hive.ql.udf.generic.SimpleGenericUDAFParameterInfo |
createUDAFParameterInfo(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector[] params,
boolean isWindowing,
boolean distinct,
boolean allColumns)
Creates SimpleGenericUDAFParameterInfo.
|
Optional<org.apache.hadoop.hive.ql.exec.FunctionInfo> |
getBuiltInFunctionInfo(String name)
Get a Hive built-in function by name.
|
Class<?> |
getDateDataTypeClass()
Hive Date data type class was changed in Hive 3.1.0.
|
List<org.apache.hadoop.hive.metastore.api.FieldSchema> |
getFieldsFromDeserializer(Configuration conf,
org.apache.hadoop.hive.metastore.api.Table table,
boolean skipConfError)
Get Hive table schema from deserializer.
|
org.apache.hadoop.fs.FileStatus[] |
getFileStatusRecurse(org.apache.hadoop.fs.Path path,
int level,
org.apache.hadoop.fs.FileSystem fs)
The return type of HiveStatsUtils.getFileStatusRecurse was changed from array to List in Hive 3.1.0.
|
org.apache.hadoop.hive.metastore.IMetaStoreClient |
getHiveMetastoreClient(org.apache.hadoop.hive.conf.HiveConf hiveConf)
Create a Hive Metastore client based on the given HiveConf object.
|
Class<?> |
getHiveMetaStoreUtilsClass()
Get the class of Hive's HiveMetaStoreUtils as it was split from MetaStoreUtils class in Hive 3.1.0.
|
org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter |
getHiveRecordWriter(org.apache.hadoop.mapred.JobConf jobConf,
String outputFormatClzName,
Class<? extends org.apache.hadoop.io.Writable> outValClz,
boolean isCompressed,
Properties tableProps,
org.apache.hadoop.fs.Path outPath)
Get Hive's FileSinkOperator.RecordWriter.
|
Class<?> |
getMetaStoreUtilsClass()
Get the class of Hive's MetaStoreUtils because its package name was changed in Hive 3.1.0.
|
Set<String> |
getNotNullColumns(org.apache.hadoop.hive.metastore.IMetaStoreClient client,
Configuration conf,
String dbName,
String tableName)
Get the set of columns that have NOT NULL constraints.
|
org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector |
getObjectInspectorForConstant(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo primitiveTypeInfo,
Object value)
Get ObjectInspector for a constant value.
|
Optional<UniqueConstraint> |
getPrimaryKey(org.apache.hadoop.hive.metastore.IMetaStoreClient client,
String dbName,
String tableName,
byte requiredTrait)
Get the primary key of a Hive table and convert it to a UniqueConstraint.
|
Class<?> |
getTimestampDataTypeClass()
Hive Timestamp data type class was changed in Hive 3.1.0.
|
List<String> |
getViews(org.apache.hadoop.hive.metastore.IMetaStoreClient client,
String databaseName)
Get a list of views in the given database from the given Hive Metastore client.
|
boolean |
isDateStats(org.apache.hadoop.hive.metastore.api.ColumnStatisticsData colStatsData)
Whether a Hive ColumnStatisticsData is for DATE columns.
|
Set<String> |
listBuiltInFunctions()
List names of all built-in functions.
|
void |
makeSpecFromName(Map<String,String> partSpec,
org.apache.hadoop.fs.Path currPath)
The signature of HiveStatsUtils.makeSpecFromName() was changed in Hive 3.1.0.
|
boolean |
moveToTrash(org.apache.hadoop.fs.FileSystem fs,
org.apache.hadoop.fs.Path path,
Configuration conf,
boolean purge)
Moves a particular file or directory to trash.
|
java.time.LocalDate |
toFlinkDate(Object hiveDate)
Converts a hive date instance to LocalDate which is expected by DataFormatConverter.
|
CatalogColumnStatisticsDataDate |
toFlinkDateColStats(org.apache.hadoop.hive.metastore.api.ColumnStatisticsData hiveDateColStats)
Generate Flink CatalogColumnStatisticsDataDate from Hive ColumnStatisticsData for DATE columns.
|
java.time.LocalDateTime |
toFlinkTimestamp(Object hiveTimestamp)
Converts a hive timestamp instance to LocalDateTime which is expected by DataFormatConverter.
|
Object |
toHiveDate(Object flinkDate)
Converts a Flink date instance to what's expected by Hive.
|
org.apache.hadoop.hive.metastore.api.ColumnStatisticsData |
toHiveDateColStats(CatalogColumnStatisticsDataDate flinkDateColStats)
Generate Hive ColumnStatisticsData from Flink CatalogColumnStatisticsDataDate for DATE columns.
|
Object |
toHiveTimestamp(Object flinkTimestamp)
Converts a Flink timestamp instance to what's expected by Hive.
|
org.apache.hadoop.hive.metastore.IMetaStoreClient getHiveMetastoreClient(org.apache.hadoop.hive.conf.HiveConf hiveConf)
hiveConf
- HiveConf instance

List<String> getViews(org.apache.hadoop.hive.metastore.IMetaStoreClient client, String databaseName) throws org.apache.hadoop.hive.metastore.api.UnknownDBException, org.apache.thrift.TException
client
- Hive Metastore client

databaseName
- the name of the database

org.apache.hadoop.hive.metastore.api.UnknownDBException
- if the database doesn't exist

org.apache.thrift.TException
- for any other generic exceptions caused by Thrift

boolean moveToTrash(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path path, Configuration conf, boolean purge) throws IOException
This interface is here because FileUtils.moveToTrash in different Hive versions have different signatures.
fs
- the FileSystem to use

path
- the path of the file or directory to be moved to trash.

conf
- the Configuration to use

purge
- whether to try to skip trash and directly delete the file/directory. This flag may be ignored by
old Hive versions prior to 2.3.0.

IOException
- if the file/directory cannot be properly moved or deleted

void alterTable(org.apache.hadoop.hive.metastore.IMetaStoreClient client, String databaseName, String tableName, org.apache.hadoop.hive.metastore.api.Table table) throws org.apache.hadoop.hive.metastore.api.InvalidOperationException, org.apache.hadoop.hive.metastore.api.MetaException, org.apache.thrift.TException
client
- the Hive metastore client

databaseName
- the name of the database to which the table belongs

tableName
- the name of the table to be altered

table
- the new Hive table

org.apache.hadoop.hive.metastore.api.InvalidOperationException
org.apache.hadoop.hive.metastore.api.MetaException
org.apache.thrift.TException
void alterPartition(org.apache.hadoop.hive.metastore.IMetaStoreClient client, String databaseName, String tableName, org.apache.hadoop.hive.metastore.api.Partition partition) throws org.apache.hadoop.hive.metastore.api.InvalidOperationException, org.apache.hadoop.hive.metastore.api.MetaException, org.apache.thrift.TException
org.apache.hadoop.hive.metastore.api.InvalidOperationException
org.apache.hadoop.hive.metastore.api.MetaException
org.apache.thrift.TException
org.apache.hadoop.hive.ql.udf.generic.SimpleGenericUDAFParameterInfo createUDAFParameterInfo(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector[] params, boolean isWindowing, boolean distinct, boolean allColumns)
Class<?> getMetaStoreUtilsClass()
Class<?> getHiveMetaStoreUtilsClass()
Class<?> getDateDataTypeClass()
Class<?> getTimestampDataTypeClass()
org.apache.hadoop.fs.FileStatus[] getFileStatusRecurse(org.apache.hadoop.fs.Path path, int level, org.apache.hadoop.fs.FileSystem fs) throws IOException
path
- the path of the directory

level
- the level of recursion

fs
- the file system of the directory

IOException
- in case of any I/O error

void makeSpecFromName(Map<String,String> partSpec, org.apache.hadoop.fs.Path currPath)
partSpec
- partition specs

currPath
- the current path

org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector getObjectInspectorForConstant(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo primitiveTypeInfo, Object value)
org.apache.hadoop.hive.metastore.api.ColumnStatisticsData toHiveDateColStats(CatalogColumnStatisticsDataDate flinkDateColStats)
boolean isDateStats(org.apache.hadoop.hive.metastore.api.ColumnStatisticsData colStatsData)
CatalogColumnStatisticsDataDate toFlinkDateColStats(org.apache.hadoop.hive.metastore.api.ColumnStatisticsData hiveDateColStats)
org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter getHiveRecordWriter(org.apache.hadoop.mapred.JobConf jobConf, String outputFormatClzName, Class<? extends org.apache.hadoop.io.Writable> outValClz, boolean isCompressed, Properties tableProps, org.apache.hadoop.fs.Path outPath)
List<org.apache.hadoop.hive.metastore.api.FieldSchema> getFieldsFromDeserializer(Configuration conf, org.apache.hadoop.hive.metastore.api.Table table, boolean skipConfError)
Optional<org.apache.hadoop.hive.ql.exec.FunctionInfo> getBuiltInFunctionInfo(String name)
Set<String> getNotNullColumns(org.apache.hadoop.hive.metastore.IMetaStoreClient client, Configuration conf, String dbName, String tableName)
Optional<UniqueConstraint> getPrimaryKey(org.apache.hadoop.hive.metastore.IMetaStoreClient client, String dbName, String tableName, byte requiredTrait)
Object toHiveTimestamp(Object flinkTimestamp)
java.time.LocalDateTime toFlinkTimestamp(Object hiveTimestamp)
Object toHiveDate(Object flinkDate)
java.time.LocalDate toFlinkDate(Object hiveDate)
Copyright © 2014–2019 The Apache Software Foundation. All rights reserved.