Commit b9cae771 by Dennis Fusaro

Submitting Initial Bridge Code - Stubbed tests to come in next commit for bridge
parent 98d436c2
package org.apache.hadoop.metadata.bridge;

import org.apache.hadoop.metadata.types.TypeSystem;
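/**
* Contract for a metadata bridge: an implementation registers the class type
* definitions it needs with the supplied TypeSystem.
*/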
public interface Bridge {
boolean defineBridgeTypes(TypeSystem ts);
}
package org.apache.hadoop.metadata.bridge;
import org.apache.hadoop.metadata.types.AttributeDefinition;
import org.apache.hadoop.metadata.types.ClassType;
import org.apache.hadoop.metadata.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.types.TypeSystem;
import com.google.common.collect.ImmutableList;
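/**
* Shared helper for bridge implementations; wraps construction of a
* class-type HierarchicalTypeDefinition.
*/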
public class BridgeAssistant {
protected HierarchicalTypeDefinition<ClassType> createClassTypeDef(String name, ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
return new HierarchicalTypeDefinition<ClassType>(ClassType.class, name, superTypes, attrDefs);
}
}
package org.apache.hadoop.metadata.bridge;
import org.apache.hadoop.hive.metastore.api.MetaException;
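/**
* Bridge-specific exception; a serializable subclass of Hive's MetaException.
*/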
public class BridgeException extends MetaException {
private static final long serialVersionUID = -384401342591560473L;
}
package org.apache.hadoop.metadata.bridge.hivelineage;
import org.apache.hadoop.metadata.bridge.Bridge;
import org.apache.hadoop.metadata.types.TypeSystem;
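/**
* Placeholder for the Hive lineage bridge; defineBridgeTypes is still a stub
* and always returns false.
*/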
public class HiveLineageBridge implements Bridge {
@Override
public boolean defineBridgeTypes(TypeSystem ts) {
// TODO Auto-generated method stub
return false;
}
}
package org.apache.hadoop.metadata.bridge.hivestructure;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.api.UnknownDBException;
import org.apache.hadoop.hive.metastore.api.UnknownTableException;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.MetadataService;
import org.apache.hadoop.metadata.Referenceable;
import org.apache.hadoop.metadata.storage.RepositoryException;
import org.apache.hadoop.metadata.types.ClassType;
import org.apache.thrift.TException;
/*
 * Initial pass at a one-time importer. TODO: needs a re-write.
 */
public class HiveMetaImporter {
private static HiveMetaStoreClient msc;
private static MetadataService ms;
public HiveMetaImporter(MetadataService ms){
try {
HiveMetaImporter.ms = ms;
msc = new HiveMetaStoreClient(new HiveConf());
} catch (MetaException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
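/**
* Walks the metastore: imports all databases, then the tables in each database,
* then the fields of each table. Returns false if a metastore or repository
* call fails.
*/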
public static boolean fullImport(){
try{
databasesImport();
for (String dbName : msc.getAllDatabases()){
tablesImport(dbName);
for(String tbName : msc.getAllTables(dbName)){
fieldsImport(dbName,tbName);
}
}
return true;
}catch(MetaException me){
me.printStackTrace();
}catch(RepositoryException re){
re.printStackTrace();
}
return false;
}
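/**
* Imports every database found in the metastore.
*/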
public static boolean databasesImport() throws MetaException, RepositoryException{
ClassType classType = null;
try {
classType = ms.getTypeSystem().getDataType(ClassType.class, HiveStructureBridge.DB_CLASS_TYPE);
} catch (MetadataException e1) {
e1.printStackTrace();
}
for(String dbName : msc.getAllDatabases()){
databaseImport(dbName);
}
return true;
}
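/**
* Imports a single database as a HiveDatabase Referenceable: description,
* location URI, name, and owner type/name when set.
*/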
public static boolean databaseImport(String dbName) throws MetaException, RepositoryException{
try {
Database db = msc.getDatabase(dbName);
Referenceable dbRef = new Referenceable(HiveStructureBridge.DB_CLASS_TYPE);
dbRef.set("DESC", db.getDescription());
dbRef.set("DB_LOCATION_URI", db.getLocationUri());
dbRef.set("NAME", db.getName());
if(db.isSetOwnerType()){dbRef.set("OWNER_TYPE", db.getOwnerType());}
if(db.isSetOwnerName()){dbRef.set("OWNER_NAME", db.getOwnerName());}
ms.getRepository().create(dbRef);
} catch (NoSuchObjectException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (TException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return true;
}
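/**
* Imports every table in the given database.
*/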
public static boolean tablesImport(String dbName) throws MetaException, RepositoryException{
ClassType classType = null;
try {
classType = ms.getTypeSystem().getDataType(ClassType.class, HiveStructureBridge.TB_CLASS_TYPE);
} catch (MetadataException e1) {
e1.printStackTrace();
}
for(String tbName : msc.getAllTables(dbName)){
tableImport(dbName, tbName);
}
return true;
}
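/**
* Imports a single table as a HiveTable Referenceable: create time, last access
* time, owner, table name, table type, and the view texts when set.
*/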
public static boolean tableImport(String dbName, String tbName) throws MetaException, RepositoryException{
try {
Table tb = msc.getTable(dbName, tbName);
Referenceable tbRef = new Referenceable(HiveStructureBridge.TB_CLASS_TYPE);
tbRef.set("CREATE_TIME", tb.getCreateTime());
tbRef.set("LAST_ACCESS_TIME", tb.getLastAccessTime());
tbRef.set("OWNER", tb.getOwner());
tbRef.set("TBL_NAME", tb.getTableName());
tbRef.set("TBL_TYPE", tb.getTableType());
if(tb.isSetViewExpandedText()){tbRef.set("VIEW_EXPANDED_TEXT", tb.getViewExpandedText());}
if(tb.isSetViewOriginalText()){tbRef.set("VIEW_ORIGINAL_TEXT", tb.getViewOriginalText());}
ms.getRepository().create(tbRef);
} catch (NoSuchObjectException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (TException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return true;
}
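/**
* Imports every field of the given table as a HiveField Referenceable: comment
* (when set), column name, and type name.
*/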
public static boolean fieldsImport (String dbName, String tbName) throws MetaException, RepositoryException{
ClassType classType = null;
try {
classType = ms.getTypeSystem().getDataType(ClassType.class, HiveStructureBridge.FD_CLASS_TYPE);
} catch (MetadataException e1) {
e1.printStackTrace();
}
try {
for(FieldSchema fs : msc.getFields(dbName, tbName)){
Referenceable fdRef = new Referenceable(HiveStructureBridge.FD_CLASS_TYPE);
if(fs.isSetComment()){fdRef.set("COMMENT", fs.getName());}
fdRef.set("COLUMN_NAME", fs.getName());
fdRef.set("TYPE_NAME", fs.getType());
ms.getRepository().create(fdRef);
}
} catch (UnknownTableException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (UnknownDBException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (TException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return true;
}
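/**
* Imports a single named field from the given table; the Referenceable is built
* but not yet saved to the backend (see the TODO below).
*/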
public static boolean fieldImport(String dbName, String tbName, String fdName) throws MetaException{
try {
for(FieldSchema fs : msc.getFields(dbName, tbName)){
if (fs.getName().equals(fdName)){
Referenceable fdRef = new Referenceable(HiveStructureBridge.FD_CLASS_TYPE);
if(fs.isSetComment()){fdRef.set("COMMENT", fs.getComment());}
fdRef.set("COLUMN_NAME", fs.getName());
fdRef.set("TYPE_NAME", fs.getType());
//SaveObject to MS Backend
return true;
}
}
} catch (UnknownTableException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (UnknownDBException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (TException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return true;
}
}
package org.apache.hadoop.metadata.bridge.hivestructure;

import java.util.ArrayList;

import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.bridge.Bridge;
import org.apache.hadoop.metadata.bridge.BridgeAssistant;
import org.apache.hadoop.metadata.types.AttributeDefinition;
import org.apache.hadoop.metadata.types.ClassType;
import org.apache.hadoop.metadata.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.types.Multiplicity;
import org.apache.hadoop.metadata.types.TypeSystem;
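/**
* Bridge for Hive's structural metadata; defines the HiveDatabase, HiveTable,
* and HiveField class types in the TypeSystem.
*/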
public class HiveStructureBridge extends BridgeAssistant implements Bridge {
static final String DB_CLASS_TYPE = "HiveDatabase";
static final String TB_CLASS_TYPE = "HiveTable";
static final String FD_CLASS_TYPE = "HiveField";
@Override
public boolean defineBridgeTypes(TypeSystem ts) {
ArrayList<HierarchicalTypeDefinition<?>> al = new ArrayList<HierarchicalTypeDefinition<?>>();
try{
HierarchicalTypeDefinition<ClassType> databaseClassTypeDef = new HierarchicalTypeDefinition<ClassType>("ClassType",DB_CLASS_TYPE, null,
new AttributeDefinition[]{
new AttributeDefinition("DESC", "STRING_TYPE", Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("DB_LOCATION_URI", "STRING_TYPE", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("NAME", "STRING_TYPE", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("OWNER_TYPE", "STRING_TYPE", Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("OWNER_NAME", "STRING_TYPE", Multiplicity.OPTIONAL, false, null)
}
);
HierarchicalTypeDefinition<ClassType> tableClassTypeDef = new HierarchicalTypeDefinition<ClassType>("ClassType",TB_CLASS_TYPE, null,
new AttributeDefinition[]{
new AttributeDefinition("CREATE_TIME", "LONG_TYPE", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("LAST_ACCESS_TIME", "LONG_TYPE", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("OWNER", "STRING_TYPE", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("TBL_NAME", "STRING_TYPE", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("TBL_TYPE", "STRING_TYPE", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("VIEW_EXPANDED_TEXT", "STRING_TYPE", Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("VIEW_ORIGINAL_TEXT", "STRING_TYPE", Multiplicity.OPTIONAL, false, null)
}
);
HierarchicalTypeDefinition<ClassType> columnClassTypeDef = new HierarchicalTypeDefinition<ClassType>("ClassType",FD_CLASS_TYPE, null,
new AttributeDefinition[]{
new AttributeDefinition("COMMENT", "STRING_TYPE", Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("COLUMN_NAME", "STRING_TYPE", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("TYPE_NAME", "STRING_TYPE", Multiplicity.REQUIRED, false, null)
}
);
al.add(databaseClassTypeDef);
al.add(tableClassTypeDef);
al.add(columnClassTypeDef);
}catch(ClassNotFoundException e){
e.printStackTrace();
}
for (HierarchicalTypeDefinition htd : al){
try {
ts.defineClassType(htd);
} catch (MetadataException e) {
System.out.println(htd.hierarchicalMetaTypeName + " could not be added to the type system");
e.printStackTrace();
}
}
return false;
}
}
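// Rough usage sketch (assumes a MetadataService instance "ms" is already
// available from the metadata server; obtaining one is outside this code):
//
// Bridge bridge = new HiveStructureBridge();
// bridge.defineBridgeTypes(ms.getTypeSystem()); // registers HiveDatabase, HiveTable, HiveField
// new HiveMetaImporter(ms);                     // also opens the HiveMetaStoreClient from HiveConf
// HiveMetaImporter.fullImport();                // walks databases -> tables -> fields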