Commit 04997e4c by Ballistar13

Generalized Bridges

Replaced BridgeManager; designed EnitityBeans
parent e0699c8f
package org.apache.hadoop.metadata.bridge;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;
import javax.inject.Inject;
import org.apache.hadoop.metadata.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.repository.MetadataRepository;
import org.apache.hadoop.metadata.storage.RepositoryException;
import org.apache.hadoop.metadata.types.AttributeDefinition;
import org.apache.hadoop.metadata.types.ClassType;
import org.apache.hadoop.metadata.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.types.Multiplicity;
import org.apache.hadoop.metadata.types.TypeSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.ImmutableList;
public abstract class ABridge implements IBridge {
protected ArrayList<Class<AEnitityBean>> typeBeanClasses;
protected MetadataRepository repo;
protected static final Logger LOG = LoggerFactory.getLogger("BridgeLogger");
protected HierarchicalTypeDefinition<ClassType> createClassTypeDef(String name, ImmutableList<String> superTypes,
AttributeDefinition... attrDefs) {
return new HierarchicalTypeDefinition<ClassType>(ClassType.class, name, superTypes, attrDefs);
}
public ArrayList<Class<AEnitityBean>> getTypeBeanClasses() {
return typeBeanClasses;
}
@Inject
ABridge(MetadataRepository repo) {
this.repo = repo;
}
public <T extends AEnitityBean> Object get(String id) throws RepositoryException {
// fetch the typed instance from the repository by id
ITypedReferenceableInstance ref = repo.getEntityDefinition(id);
// convert it back into the matching entity bean
try {
Class<AEnitityBean> c = getTypeBeanInListByName(ref.getTypeName());
return c.newInstance().convertFromITypedReferenceable(ref);
} catch (BridgeException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) {
LOG.error(e.getMessage(), e);
}
return null;
}
public String create(AEnitityBean bean) throws MetadataException {
ClassType type = TypeSystem.getInstance().getDataType(ClassType.class, bean.getClass().getSimpleName());
ITypedReferenceableInstance refBean = null;
try {
refBean = type.convert(bean.convertToReferencable(), Multiplicity.REQUIRED);
String id = repo.createEntity(refBean, type.getName());
return id;
} catch (IllegalArgumentException | IllegalAccessException e) {
LOG.error(e.getMessage(), e);
}
throw new MetadataException("Cannot create entity");
}
public Iterable<String> list() throws RepositoryException {
List<String> returnList = new ArrayList<String>();
for (Class<AEnitityBean> c : typeBeanClasses) {
returnList.addAll(repo.getEntityList(c.getSimpleName()));
}
return returnList;
}
protected final boolean containsType(String s){
for (Class c: typeBeanClasses){
if (c.getSimpleName().equals(s)){
return true;
}
}
return false;
}
protected final Class<AEnitityBean> getTypeBeanInListByName(String s) throws BridgeException {
for (Class<AEnitityBean> c : typeBeanClasses) {
if (c.getSimpleName().equals(s)) {
return c;
}
}
throw new BridgeException("No EntityBean Definition Found");
}
}
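A minimal sketch (not part of this commit) of how a concrete bridge could plug into ABridge: the constructor hands the repository to the superclass and registers the entity-bean classes that BridgeManager.loadTypes() later converts into class type definitions. ExampleBridge is a hypothetical name, and ExampleEntityBean is the hypothetical bean sketched after AEnitityBean below.

// Hypothetical sketch, not part of the commit
package org.apache.hadoop.metadata.bridge;

import java.util.ArrayList;

import javax.inject.Inject;

import org.apache.hadoop.metadata.repository.MetadataRepository;
import org.apache.hadoop.metadata.types.TypeSystem;

public class ExampleBridge extends ABridge {

    @Inject
    ExampleBridge(MetadataRepository repo) {
        super(repo);
        typeBeanClasses = new ArrayList<Class<AEnitityBean>>();
        // unchecked cast because typeBeanClasses is declared as ArrayList<Class<AEnitityBean>>
        typeBeanClasses.add((Class<AEnitityBean>) (Class<?>) ExampleEntityBean.class);
    }

    @Override
    public boolean defineBridgeTypes(TypeSystem ts) {
        // in this design the type definitions are derived from the bean classes by BridgeManager
        return true;
    }
}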
package org.apache.hadoop.metadata.bridge;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.util.Map.Entry;
import org.apache.hadoop.metadata.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.Referenceable;
import org.apache.hadoop.metadata.types.AttributeInfo;
public abstract class AEnitityBean {
public final Referenceable convertToReferencable() throws IllegalArgumentException, IllegalAccessException{
Referenceable selfAware = new Referenceable(this.getClass().getSimpleName());
for(Field f : this.getClass().getFields()){
selfAware.set(f.getName(), f.get(this));
}
return selfAware;
}
public final <T extends AEnitityBean> Object convertFromITypedReferenceable(ITypedReferenceableInstance instance) throws InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException, NoSuchMethodException, SecurityException, BridgeException {
if (!instance.getTypeName().equals(this.getClass().getSimpleName())) {
throw new BridgeException("ReferenceableInstance type not the same as bean");
}
Object retObj = this.getClass().newInstance();
for (Entry<String, AttributeInfo> e : instance.fieldMapping().fields.entrySet()) {
try {
String convertedName = e.getKey().substring(0, 1).toUpperCase() + e.getKey().substring(1);
// look up the setter by name and by the declared field type, then populate the new bean instance
Class<?> fieldType = this.getClass().getField(e.getKey()).getType();
this.getClass().getMethod("set" + convertedName, fieldType).invoke(retObj, instance.get(e.getKey()));
} catch (MetadataException | NoSuchFieldException e1) {
e1.printStackTrace();
}
}
return retObj;
}
}
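A minimal sketch (not part of this commit) of the bean shape AEnitityBean assumes: public fields that convertToReferencable() copies into the Referenceable, and matching set&lt;FieldName&gt;(...) methods that convertFromITypedReferenceable() invokes reflectively. The class and field names are hypothetical.

// Hypothetical example bean, not part of the commit
package org.apache.hadoop.metadata.bridge;

public class ExampleEntityBean extends AEnitityBean {

    // public fields are picked up by getClass().getFields() in convertToReferencable()
    public String name;
    public String description;

    // setters named "set" + capitalized field name are looked up by convertFromITypedReferenceable()
    public void setName(String name) {
        this.name = name;
    }

    public void setDescription(String description) {
        this.description = description;
    }
}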
package org.apache.hadoop.metadata.bridge;
import org.apache.hadoop.metadata.types.AttributeDefinition;
import org.apache.hadoop.metadata.types.ClassType;
import org.apache.hadoop.metadata.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.types.TypeSystem;
import com.google.common.collect.ImmutableList;
public class BridgeAssistant {
protected HierarchicalTypeDefinition<ClassType> createClassTypeDef(String name, ImmutableList<String> superTypes,
AttributeDefinition... attrDefs) {
return new HierarchicalTypeDefinition<ClassType>(ClassType.class, name, superTypes, attrDefs);
}
}
......@@ -4,6 +4,10 @@ import org.apache.hadoop.hive.metastore.api.MetaException;
public class BridgeException extends MetaException {
public BridgeException(String msg) {
super(msg);
}
/**
*
*/
......
package org.apache.hadoop.metadata.bridge;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Properties;
import javax.inject.Inject;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.repository.MetadataRepository;
import org.apache.hadoop.metadata.types.AttributeDefinition;
import org.apache.hadoop.metadata.types.ClassType;
import org.apache.hadoop.metadata.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.types.Multiplicity;
import org.apache.hadoop.metadata.types.TypeSystem;
public class BridgeManager {
TypeSystem ts;
ArrayList<ABridge> activeBridges;
private final static String bridgeFileDefault = "bridge-manager.properties";
@Inject
BridgeManager(MetadataRepository rs){
this.ts = TypeSystem.getInstance();
String propsFile = System.getProperty("bridgeManager.propsFile");
if (propsFile != null && !propsFile.isEmpty()) {
setActiveBridges(propsFile);
} else {
setActiveBridges(bridgeFileDefault);
}
for (ABridge bridge : activeBridges){
try {
this.loadTypes(bridge, ts);
} catch (MetadataException e) {
ABridge.LOG.error(e.getMessage(), e);
}
}
// Handle some kind of errors - waiting on errors concept from typesystem
}
public ArrayList<ABridge> getActiveBridges(){
return this.activeBridges;
}
BridgeManager(TypeSystem ts){
this.ts = ts;
}
private void setActiveBridges(String bridgePropFileName){
if(bridgePropFileName == null || bridgePropFileName.isEmpty()){
bridgePropFileName = BridgeManager.bridgeFileDefault;
}
ArrayList<ABridge> aBList = new ArrayList<ABridge>();
Properties props = new Properties();
InputStream configStm = this.getClass().getResourceAsStream(bridgePropFileName);
try {
ABridge.LOG.info("Loading : Active Bridge List");
props.load(configStm);
String[] activeBridgeList = ((String)props.get("BridgeManager.activeBridges")).split(",");
ABridge.LOG.info("Loaded : Active Bridge List");
ABridge.LOG.info("First Loaded :" + activeBridgeList[0]);
for (String s : activeBridgeList){
Class<?> bridgeCls = Class.forName(s);
if (ABridge.class.isAssignableFrom(bridgeCls)) {
aBList.add((ABridge) bridgeCls.newInstance());
}
}
} catch (IOException | InstantiationException | IllegalAccessException | ClassNotFoundException e) {
ABridge.LOG.error(e.getMessage(), e);
}
this.activeBridges = aBList;
}
private final boolean loadTypes(ABridge bridge, TypeSystem ts) throws MetadataException{
for (Class<AEnitityBean> clazz : bridge.getTypeBeanClasses()){
ts.defineClassType(BridgeManager.convertEntityBeanToClassTypeDefinition(clazz));
}
return false;
}
public final static HierarchicalTypeDefinition<ClassType> convertEntityBeanToClassTypeDefinition(Class<? extends AEnitityBean> class1){
ArrayList<AttributeDefinition> attDefAL = new ArrayList<AttributeDefinition>();
for (Field f: class1.getFields()){
try {
attDefAL.add(BridgeManager.convertFieldtoAttributeDefiniton(f));
} catch (MetadataException e) {
ABridge.LOG.error("Class " + class1.getName() + " cannot be converted to TypeDefinition");
e.printStackTrace();
}
}
HierarchicalTypeDefinition<ClassType> typeDef = new HierarchicalTypeDefinition<>(ClassType.class, class1.getSimpleName(),
null, attDefAL.toArray(new AttributeDefinition[attDefAL.size()]));
return typeDef;
}
public final static AttributeDefinition convertFieldtoAttributeDefiniton(Field f) throws MetadataException{
return new AttributeDefinition(f.getName(), f.getType().getSimpleName(), Multiplicity.REQUIRED, false, null);
}
}
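A small usage sketch (not part of this commit, assuming the hypothetical ExampleEntityBean above is on the classpath) of what convertEntityBeanToClassTypeDefinition produces: one AttributeDefinition per public field, named after the field, typed with the field type's simple name and Multiplicity.REQUIRED, wrapped in a class type named after the bean's simple class name.

// Hypothetical usage sketch, not part of the commit
package org.apache.hadoop.metadata.bridge;

import org.apache.hadoop.metadata.types.ClassType;
import org.apache.hadoop.metadata.types.HierarchicalTypeDefinition;

public class BridgeManagerSketch {
    public static void main(String[] args) {
        // for the hypothetical ExampleEntityBean this yields a class type definition named
        // "ExampleEntityBean" with two String attributes, "name" and "description", both REQUIRED
        HierarchicalTypeDefinition<ClassType> def =
                BridgeManager.convertEntityBeanToClassTypeDefinition(ExampleEntityBean.class);
        System.out.println(def);
    }
}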
package org.apache.hadoop.metadata.bridge;
import org.apache.hadoop.metadata.types.AttributeDefinition;
import org.apache.hadoop.metadata.types.ClassType;
import org.apache.hadoop.metadata.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.types.TypeSystem;
import com.google.common.collect.ImmutableList;
public interface IBridge {
boolean defineBridgeTypes(TypeSystem ts);
}
package org.apache.hadoop.metadata.bridge.hivelineage;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.bridge.ABridge;
import org.apache.hadoop.metadata.types.AttributeDefinition;
import org.apache.hadoop.metadata.types.ClassType;
import org.apache.hadoop.metadata.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.types.Multiplicity;
import org.apache.hadoop.metadata.types.TypeSystem;
public class HiveLineageBridge extends ABridge {
static final String LINEAGE_CLASS_TYPE = "HiveLineage";
......
......@@ -31,6 +31,7 @@ public class HiveMetaImporter {
try {
this.repo = repo;
msc = new HiveMetaStoreClient(new HiveConf());
// TODO Get hive-site.conf from class path first
} catch (MetaException e) {
// TODO Auto-generated catch block
e.printStackTrace();
......
......@@ -3,8 +3,7 @@ package org.apache.hadoop.metadata.bridge.hivestructure;
import java.util.ArrayList;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.bridge.ABridge;
import org.apache.hadoop.metadata.types.AttributeDefinition;
import org.apache.hadoop.metadata.types.ClassType;
import org.apache.hadoop.metadata.types.HierarchicalTypeDefinition;
......@@ -12,7 +11,7 @@ import org.apache.hadoop.metadata.types.Multiplicity;
import org.apache.hadoop.metadata.types.TypeSystem;
public class HiveStructureBridge extends ABridge {
static final String DB_CLASS_TYPE = "HiveDatabase";
static final String TB_CLASS_TYPE = "HiveTable";
......@@ -20,6 +19,9 @@ public class HiveStructureBridge extends BridgeAssistant implements Bridge{
@Override
public boolean defineBridgeTypes(TypeSystem ts) {
ArrayList<HierarchicalTypeDefinition<?>> al = new ArrayList<HierarchicalTypeDefinition<?>>();
// TODO
//convert to helper methods
// Add to arrayList
try{
HierarchicalTypeDefinition<ClassType> databaseClassTypeDef = new HierarchicalTypeDefinition<ClassType>(ClassType.class, DB_CLASS_TYPE, null,
......
#BridgeManager.activeBridges denotes which bridge definitions to load from the classpath (comma-separated list of fully qualified class names)
#
BridgeManager.activeBridges=org.apache.hadoop.metadata.bridge.hivelineage.HiveLineageBridge
\ No newline at end of file
package org.apache.hadoop.metadata.bridge.test;
import org.testng.annotations.Test;
public class BridgeManagerTest{
@Test
public void testLoadPropertiesFile() throws Exception {
}
}
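A sketch (not part of this commit) of what testLoadPropertiesFile might verify, assuming bridge-manager.properties sits at the root of the test classpath: the file loads and defines the BridgeManager.activeBridges key that BridgeManager reads. The test class name is hypothetical.

// Hypothetical sketch, not part of the commit; assumes the properties file is at the classpath root
package org.apache.hadoop.metadata.bridge.test;

import java.io.InputStream;
import java.util.Properties;

import org.testng.Assert;
import org.testng.annotations.Test;

public class BridgeManagerPropertiesSketchTest {

    @Test
    public void testActiveBridgesKeyIsPresent() throws Exception {
        Properties props = new Properties();
        InputStream in = getClass().getResourceAsStream("/bridge-manager.properties");
        Assert.assertNotNull(in, "bridge-manager.properties not found on the classpath");
        props.load(in);
        Assert.assertNotNull(props.getProperty("BridgeManager.activeBridges"));
    }
}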
#BridgeManager.activeBridges denotes which bridge definitions to load from the classpath (comma-separated list of fully qualified class names)
#
BridgeManager.activeBridges=org.apache.hadoop.metadata.bridge.hivelineage.HiveLineageBridge
\ No newline at end of file