Commit 858348b0 by Venkatesh Seetharam

Add Hive Data Model Generator with minor refactoring

parent a2e16bb9
......@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hadoop.metadata.hivetypes;
package org.apache.hadoop.metadata.hive.hook;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
......@@ -40,6 +40,7 @@ import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHook;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.metadata.hivetypes.HiveTypeSystem;
import org.apache.hadoop.util.StringUtils;
import org.json.JSONException;
import org.json.JSONObject;
......@@ -61,7 +62,7 @@ import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
/**
* DgiHook sends lineage information to the DgiServer
* DgiHook sends lineage information to the DgiServer.
*/
public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHook {
......@@ -75,9 +76,8 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHoo
private static final String user = "postgres";
private static final String password = "postgres";
private static final String insertQuery =
"insert into query_info(query_id, query_text, query_plan, start_time, user_name, " +
"query_graph) "
+ "values (?, ?, ?, ?, ?, ?";
"insert into query_info(query_id, query_text, query_plan, start_time, user_name, "
+ "query_graph) values (?, ?, ?, ?, ?, ?";
private static final String updateQuery =
"update query_info set end_time = ? where query_id = ?";
private static ExecutorService executor;
......
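For context, the sketch below shows how the hook's insertQuery could be bound and executed over plain JDBC. The connection URL, database name, and sample values are illustrative assumptions, not part of this commit.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.Timestamp;

public class QueryInfoWriterSketch {

    // Same statement as the hook's insertQuery constant.
    private static final String insertQuery =
            "insert into query_info(query_id, query_text, query_plan, start_time, user_name, "
            + "query_graph) values (?, ?, ?, ?, ?, ?)";

    public static void main(String[] args) throws Exception {
        // Hypothetical connection details; the hook itself only hard-codes user/password "postgres".
        try (Connection con = DriverManager.getConnection(
                "jdbc:postgresql://localhost:5432/dgi", "postgres", "postgres");
             PreparedStatement stmt = con.prepareStatement(insertQuery)) {
            stmt.setString(1, "query-id-1");                                  // query_id
            stmt.setString(2, "select * from t");                             // query_text
            stmt.setString(3, "{}");                                          // query_plan (JSON)
            stmt.setTimestamp(4, new Timestamp(System.currentTimeMillis())); // start_time
            stmt.setString(5, "postgres");                                    // user_name
            stmt.setString(6, "{}");                                          // query_graph (JSON)
            stmt.executeUpdate();
        }
    }
}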
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.hive.model;
/**
* Hive Data Types for model and bridge.
*/
public enum HiveDataTypes {
// Enums
HIVE_OBJECTTYPE,
HIVE_PRINCIPALTYPE,
HIVE_RESOURCETYPE,
HIVE_FUNCTIONTYPE,
// Structs
HIVE_SERDE,
HIVE_SKEWEDINFO,
HIVE_ORDER,
HIVE_RESOURCEURI,
// Classes
HIVE_DB,
HIVE_STORAGEDESC,
HIVE_TABLE,
HIVE_COLUMN,
HIVE_PARTITION,
HIVE_INDEX,
HIVE_FUNCTION,
HIVE_ROLE,
HIVE_TYPE,
HIVE_PROCESS,
// HIVE_VIEW,
}
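To show how the Hive Data Model Generator might turn these constants into type names, here is a minimal sketch using the createEnumTypeDef helper added to TypesUtil in this commit. It assumes EnumValue takes a (value, ordinal) pair; the USER/ROLE/GROUP values are illustrative, not taken from the commit.

import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
import org.apache.hadoop.metadata.typesystem.types.EnumTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.EnumValue;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;

public class HiveEnumDefSketch {

    // Builds an enum type definition named after the HIVE_PRINCIPALTYPE constant.
    public static EnumTypeDefinition principalTypeDef() {
        return TypesUtil.createEnumTypeDef(
                HiveDataTypes.HIVE_PRINCIPALTYPE.name(),   // "HIVE_PRINCIPALTYPE"
                new EnumValue("USER", 1),                  // assumed (value, ordinal) constructor
                new EnumValue("ROLE", 2),
                new EnumValue("GROUP", 3));
    }
}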
......@@ -40,6 +40,10 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* todo: remove this.
*/
@Deprecated
public class HiveTypeSystem {
......
......@@ -25,7 +25,7 @@ import com.tinkerpop.blueprints.Vertex;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.metadata.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository;
import org.apache.hadoop.metadata.repository.graph.GraphHelper;
......@@ -46,8 +46,6 @@ import java.util.List;
@Test(enabled = false)
public class HiveGraphRepositoryTest {
public static final String HIVE_L4J_PROPS = "target/hive-log4j.properties";
public static final String HIVE_EXEC_L4J_PROPS = "target/hive-exec-log4j.properties";
private static final Logger LOG =
LoggerFactory.getLogger(HiveGraphRepositoryTest.class);
protected HiveTypeSystem hts;
......
......@@ -21,10 +21,10 @@ package org.apache.hadoop.metadata.hivetypes;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.metadata.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.typesystem.persistence.Id;
import org.apache.hadoop.metadata.typesystem.persistence.memory.MemRepository;
import org.apache.hadoop.metadata.repository.memory.MemRepository;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......@@ -105,7 +105,7 @@ public class HiveTypeSystemTest {
}
@Test(enabled = true)
public void testHiveLineage() throws MetaException, MetadataException, IOException, Exception {
public void testHiveLineage() throws Exception {
Class.forName("org.apache.hive.jdbc.HiveDriver");
String url = "jdbc:hive2://" + hiveHost + ":" + hivePort;
Connection con = DriverManager.getConnection(url, "ambari-qa", "");
......@@ -114,8 +114,5 @@ public class HiveTypeSystemTest {
stmt.execute("create table t(a int, b string)");
stmt.execute("drop table if exists t2");
stmt.execute("create table t2 as select * from t");
}
}
\ No newline at end of file
......@@ -112,6 +112,7 @@
<excludes>
<exclude>*.txt</exclude>
<exclude>**/*.txt</exclude>
<exclude>**/*.json</exclude>
<exclude>.git/**</exclude>
<exclude>.gitignore</exclude>
<exclude>**/.idea/**</exclude>
......
......@@ -27,7 +27,6 @@ import java.lang.reflect.Constructor;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
......
......@@ -21,6 +21,8 @@ package org.apache.hadoop.metadata.typesystem.types.utils;
import com.google.common.collect.ImmutableList;
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.EnumTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.EnumValue;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.IDataType;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
......@@ -57,15 +59,17 @@ public class TypesUtil {
Multiplicity.REQUIRED, false, null);
}
@SuppressWarnings("unchecked")
public static EnumTypeDefinition createEnumTypeDef(String name, EnumValue... enumValues) {
return new EnumTypeDefinition(name, enumValues);
}
public static HierarchicalTypeDefinition<TraitType> createTraitTypeDef(
String name, ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
return new HierarchicalTypeDefinition(TraitType.class, name, superTypes, attrDefs);
return new HierarchicalTypeDefinition<>(TraitType.class, name, superTypes, attrDefs);
}
@SuppressWarnings("unchecked")
public static HierarchicalTypeDefinition<ClassType> createClassTypeDef(
String name, ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
return new HierarchicalTypeDefinition(ClassType.class, name, superTypes, attrDefs);
return new HierarchicalTypeDefinition<>(ClassType.class, name, superTypes, attrDefs);
}
}
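With the raw-type warnings fixed, the helpers can be used as in the sketch below: a minimal, illustrative class-type definition for HIVE_DB. It assumes AttributeDefinition's constructor takes (name, dataTypeName, multiplicity, isComposite, reverseAttributeName) and that the primitive string type is registered under the name "string"; the attribute names are examples only.

import com.google.common.collect.ImmutableList;
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;

public class HiveDbDefSketch {

    // Defines a class type whose name matches HiveDataTypes.HIVE_DB.
    public static HierarchicalTypeDefinition<ClassType> hiveDbTypeDef() {
        return TypesUtil.createClassTypeDef(
                "HIVE_DB",
                ImmutableList.<String>of(),                 // no super types
                new AttributeDefinition("name", "string",   // assumed constructor shape
                        Multiplicity.REQUIRED, false, null),
                new AttributeDefinition("locationUri", "string",
                        Multiplicity.REQUIRED, false, null));
    }
}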