Commit 77b11f26 by Venkatesh Seetharam

BUG-32828 API to list available traits from the type system. Contributed by Venkatesh Seetharam

parent c7b82387
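The commit wires trait (tag) listing through the stack: TypeSystem now tracks registered trait names, MetadataService exposes getTraitNamesList(), and TypesResource serves the list over REST. For orientation, here is a minimal, hypothetical sketch of the new type-system call, mirroring the TypeSystemTest added below; the class name TraitListingSketch is illustrative and not part of the commit.

    import com.google.common.collect.ImmutableList;
    import org.apache.hadoop.metadata.typesystem.types.ClassType;
    import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
    import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
    import org.apache.hadoop.metadata.typesystem.types.TraitType;
    import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
    import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;

    public class TraitListingSketch {
        public static void main(String[] args) throws Exception {
            TypeSystem typeSystem = TypeSystem.getInstance();

            // Register two trait (tag) types; the names mirror those used in TypeSystemTest below.
            HierarchicalTypeDefinition<TraitType> piiTrait =
                    TypesUtil.createTraitTypeDef("PII", ImmutableList.<String>of());
            HierarchicalTypeDefinition<TraitType> financeTrait =
                    TypesUtil.createTraitTypeDef("Finance", ImmutableList.<String>of());

            typeSystem.defineTypes(ImmutableList.<StructTypeDefinition>of(),
                    ImmutableList.of(piiTrait, financeTrait),
                    ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());

            // New in this commit: trait names are tracked and can be listed.
            System.out.println("traits = " + typeSystem.getTraitsNames());
        }
    }

DefaultMetadataService.getTraitNamesList() in the diff below simply delegates to the same typeSystem.getTraitsNames() call.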
@@ -29,7 +29,7 @@ You would need the following installed:

1. Building Metadata
--------------------
- Building metadata from the source repository
+ Building DGI from the source repository
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* git clone git@github.com:hortonworks/metadata.git metadata

@@ -37,7 +37,7 @@ Building metadata from the source repository
* export MAVEN_OPTS="-Xmx1024m -XX:MaxPermSize=256m" && mvn clean install

- 2. Deploying Metadata
+ 2. Deploying DGI
---------------------
Once the build successfully completes, artifacts can be packaged for deployment.

@@ -65,21 +65,21 @@ Tar is structured as follows
|- DISCLAIMER.txt
|- CHANGES.txt

- 3. Installing & running Metadata
+ 3. Installing & running DGI
--------------------------------

- a. Installing Metadata
+ a. Installing DGI
~~~~~~~~~~~~~~~~~~~~~~
* tar -xzvf apache-metadata-${project.version}-bin.tar.gz
* cd metadata-${project.version}

- b. Starting Metadata Server
+ b. Starting DGI Server
~~~~~~~~~~~~~~~~~~~~~~~~~
* bin/metadata-start.sh

- c. Using Falcon
+ c. Using DGI
~~~~~~~~~~~~~~~
* Verify if the server is up and running

@@ -99,7 +99,7 @@ c. Using Falcon
* Search for entities (instances) in the repository
curl -v http://localhost:21000/api/metadata/discovery/search/dsl?query="from hive_table"

- d. Stopping Falcon Server
+ d. Stopping DGI Server
~~~~~~~~~~~~~~~~~~~~~~~~~
* bin/metadata-stop.sh
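Once the server is running as described above, the trait names added by this commit can be fetched from the new REST endpoint, e.g. curl http://localhost:21000/api/metadata/types/traits/list. Below is a hypothetical Jersey-client equivalent, following the pattern of the integration tests further down; the class name is illustrative only.

    import com.sun.jersey.api.client.Client;
    import com.sun.jersey.api.client.ClientResponse;
    import com.sun.jersey.api.client.WebResource;
    import com.sun.jersey.api.client.config.DefaultClientConfig;

    import javax.ws.rs.HttpMethod;
    import javax.ws.rs.core.MediaType;

    public class ListTraitsClient {
        public static void main(String[] args) {
            Client client = Client.create(new DefaultClientConfig());
            WebResource service = client.resource("http://localhost:21000/");

            // Same resource path exercised by TypesJerseyResourceIT.testGetTraitNames().
            ClientResponse clientResponse = service
                    .path("api/metadata/types/traits/list")
                    .accept(MediaType.APPLICATION_JSON)
                    .type(MediaType.APPLICATION_JSON)
                    .method(HttpMethod.GET, ClientResponse.class);

            System.out.println("status = " + clientResponse.getStatus());
            System.out.println("body = " + clientResponse.getEntity(String.class));
        }
    }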
@@ -24,7 +24,6 @@ import org.apache.hadoop.metadata.discovery.SearchIndexer;
import org.apache.hadoop.metadata.listener.EntityChangeListener;
import org.apache.hadoop.metadata.listener.TypesChangeListener;
import org.apache.hadoop.metadata.repository.MetadataRepository;
- import org.apache.hadoop.metadata.repository.RepositoryException;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.ITypedStruct;
import org.apache.hadoop.metadata.typesystem.TypesDef;

@@ -134,6 +133,16 @@ public class DefaultMetadataService implements MetadataService {
}

/**
* Return the list of trait type names in the type system.
*
* @return list of trait type names in the type system
*/
@Override
public List<String> getTraitNamesList() throws MetadataException {
return typeSystem.getTraitsNames();
}
/**
* Creates an entity, instance of the type.
*
* @param entityType type

@@ -186,15 +195,8 @@ public class DefaultMetadataService implements MetadataService {
Preconditions.checkNotNull(entityType, "entity type cannot be null");

// verify if the type exists
- String existingTypeDefinition = null;
- try {
-     existingTypeDefinition = getTypeDefinition(entityType);
- } catch (MetadataException ignore) {
-     // do nothing
- }
- if (existingTypeDefinition == null) {
-     throw new RepositoryException("type is not defined for : " + entityType);
+ if (!typeSystem.isRegistered(entityType)) {
+     throw new MetadataException("type is not defined for : " + entityType);
}
}
...
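Note the simplified validation above: instead of probing getTypeDefinition and swallowing the exception, the service now asks the type system directly and fails with a MetadataException. A standalone sketch of that guard follows; the class and method names here are illustrative only, not part of the commit.

    import org.apache.hadoop.metadata.MetadataException;
    import org.apache.hadoop.metadata.typesystem.types.TypeSystem;

    public class TypeCheckSketch {
        // Mirrors the new guard in DefaultMetadataService: reject entities whose type
        // was never registered with the TypeSystem.
        static void ensureTypeIsDefined(String entityType) throws MetadataException {
            if (!TypeSystem.getInstance().isRegistered(entityType)) {
                throw new MetadataException("type is not defined for : " + entityType);
            }
        }
    }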
@@ -48,13 +48,20 @@ public interface MetadataService {
String getTypeDefinition(String typeName) throws MetadataException;

/**
- * Return the list of types in the repository.
+ * Return the list of types in the type system.
*
- * @return list of type names in the repository
+ * @return list of type names in the type system
*/
List<String> getTypeNamesList() throws MetadataException;

/**
* Return the list of trait type names in the type system.
*
* @return list of trait type names in the type system
*/
List<String> getTraitNamesList() throws MetadataException;
/**
* Creates an entity, instance of the type.
*
* @param entityType type
...
@@ -34,6 +34,7 @@ import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
+ import java.util.concurrent.ConcurrentHashMap;

@Singleton
public class TypeSystem {

@@ -43,6 +44,12 @@ public class TypeSystem {
private Map<String, IDataType> types;
private IdType idType;
/**
* An in-memory copy of list of traits for convenience.
*/
private List<String> traitTypes;
private TypeSystem() {
initialize();
}

@@ -60,7 +67,9 @@
}

private void initialize() {
- types = new HashMap<>();
+ types = new ConcurrentHashMap<>();
+ traitTypes = new ArrayList<>();
registerPrimitiveTypes();
registerCoreTypes();
}

@@ -69,6 +78,14 @@
return ImmutableList.copyOf(types.keySet());
}
public ImmutableList<String> getTraitsNames() {
return ImmutableList.copyOf(traitTypes);
}
private void addTraitName(String traitName) {
traitTypes.add(traitName);
}
private void registerPrimitiveTypes() {
types.put(DataTypes.BOOLEAN_TYPE.getName(), DataTypes.BOOLEAN_TYPE);
types.put(DataTypes.BYTE_TYPE.getName(), DataTypes.BYTE_TYPE);

@@ -145,9 +162,9 @@
* construct a temporary StructType for a Query Result. This is not registered in the
* typeSystem.
* The attributes in the typeDefinition can only reference permanent types.
- * @param name
- * @param attrDefs
- * @return
+ * @param name struct type name
+ * @param attrDefs struct type definition
+ * @return temporary struct type
* @throws MetadataException
*/
public StructType defineQueryResultType(String name,

@@ -158,8 +175,8 @@
for (int i = 0; i < attrDefs.length; i++) {
infos[i] = new AttributeInfo(this, attrDefs[i]);
}
- StructType type = new StructType(TypeSystem.this, name, null, infos);
- return type;
+ return new StructType(this, name, null, infos);
}

public TraitType defineTraitType(HierarchicalTypeDefinition<TraitType> traitDef)

@@ -183,8 +200,7 @@
}

public Map<String, IDataType> defineTraitTypes(
- HierarchicalTypeDefinition<TraitType>... traitDefs)
- throws MetadataException {
+ HierarchicalTypeDefinition<TraitType>... traitDefs) throws MetadataException {
TransientTypeSystem transientTypes = new TransientTypeSystem(
ImmutableList.<StructTypeDefinition>of(),
ImmutableList.copyOf(traitDefs),

@@ -441,11 +457,11 @@
}

try {
- Constructor<U> cons = cls.getDeclaredConstructor(new Class[]{
+ Constructor<U> cons = cls.getDeclaredConstructor(
TypeSystem.class,
String.class,
ImmutableList.class,
- AttributeInfo[].class});
+ AttributeInfo[].class);
U type = cons.newInstance(TypeSystem.this, def.typeName, def.superTypes, infos);
TypeSystem.this.types.put(def.typeName, type);
return type;

@@ -481,13 +497,13 @@
for (TraitType traitType : traitTypes) {
constructHierarchicalType(TraitType.class,
traitNameToDefMap.get(traitType.getName()));
+ addTraitName(traitType.getName());
}

for (ClassType classType : classTypes) {
constructHierarchicalType(ClassType.class,
classNameToDefMap.get(classType.getName()));
}
}

/*
...
@@ -19,7 +19,7 @@
package org.apache.hadoop.metadata.typesystem.json;

import com.google.common.collect.ImmutableList;
- import org.apache.hadoop.metadata.BaseTest;
+ import org.apache.hadoop.metadata.typesystem.types.BaseTest;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.typesystem.*;
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
...
@@ -16,25 +16,15 @@
* limitations under the License.
*/
- package org.apache.hadoop.metadata;
+ package org.apache.hadoop.metadata.typesystem.types;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
+ import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.Struct;
- import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
- import org.apache.hadoop.metadata.typesystem.types.ClassType;
- import org.apache.hadoop.metadata.typesystem.types.DataTypes;
- import org.apache.hadoop.metadata.typesystem.types.HierarchicalType;
- import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
- import org.apache.hadoop.metadata.typesystem.types.IDataType;
- import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
- import org.apache.hadoop.metadata.typesystem.types.StructType;
- import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
- import org.apache.hadoop.metadata.typesystem.types.TraitType;
- import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
import org.junit.Before;

@@ -49,8 +39,8 @@ public abstract class BaseTest {
public static final String STRUCT_TYPE_2 = "t2";

public static Struct createStruct() throws MetadataException {
- StructType structType = (StructType) TypeSystem.getInstance()
-         .getDataType(StructType.class, STRUCT_TYPE_1);
+ StructType structType = TypeSystem.getInstance().getDataType(
+         StructType.class, STRUCT_TYPE_1);
Struct s = new Struct(structType.getName());
s.set("a", 1);
s.set("b", true);

@@ -64,10 +54,10 @@
s.set("j", BigInteger.valueOf(1L));
s.set("k", new BigDecimal(1));
s.set("l", new Date(1418265358440L));
- s.set("m", Lists.<Integer>asList(Integer.valueOf(1), new Integer[]{Integer.valueOf(1)}));
- s.set("n", Lists.<BigDecimal>asList(BigDecimal.valueOf(1.1),
+ s.set("m", Lists.asList(1, new Integer[]{1}));
+ s.set("n", Lists.asList(BigDecimal.valueOf(1.1),
new BigDecimal[]{BigDecimal.valueOf(1.1)}));
- Map<String, Double> hm = Maps.<String, Double>newHashMap();
+ Map<String, Double> hm = Maps.newHashMap();
hm.put("a", 1.0);
hm.put("b", 2.0);
s.set("o", hm);

@@ -80,7 +70,6 @@
@Before
public void setup() throws Exception {
TypeSystem ts = TypeSystem.getInstance();
ts.reset();

@@ -102,16 +91,17 @@
TypesUtil.createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
TypesUtil.createOptionalAttrDef("o",
ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)));
+ System.out.println("defined structType = " + structType);

StructType recursiveStructType = ts.defineStructType(STRUCT_TYPE_2,
true,
TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
TypesUtil.createOptionalAttrDef("s", STRUCT_TYPE_2));
+ System.out.println("defined recursiveStructType = " + recursiveStructType);
}

protected Map<String, IDataType> defineTraits(HierarchicalTypeDefinition... tDefs)
throws MetadataException {
return getTypeSystem().defineTraitTypes(tDefs);
}

@@ -144,7 +134,7 @@
);
HierarchicalTypeDefinition<ClassType> managerTypeDef =
TypesUtil.createClassTypeDef("Manager",
- ImmutableList.<String>of("Person"),
+ ImmutableList.of("Person"),
new AttributeDefinition("subordinates",
String.format("array<%s>", "Person"),
Multiplicity.COLLECTION, false, "manager")

@@ -158,8 +148,8 @@
);
ts.defineTypes(ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(securityClearanceTypeDef),
- ImmutableList.<HierarchicalTypeDefinition<ClassType>>of(deptTypeDef, personTypeDef,
+ ImmutableList.of(securityClearanceTypeDef),
+ ImmutableList.of(deptTypeDef, personTypeDef,
managerTypeDef));

ImmutableList<HierarchicalType> types = ImmutableList.of(

@@ -183,9 +173,9 @@
john.set("manager", jane);
- hrDept.set("employees", ImmutableList.<Referenceable>of(john, jane));
- jane.set("subordinates", ImmutableList.<Referenceable>of(john));
+ hrDept.set("employees", ImmutableList.of(john, jane));
+ jane.set("subordinates", ImmutableList.of(john));
jane.getTrait("SecurityClearance").set("level", 1);
...
@@ -16,13 +16,11 @@
* limitations under the License.
*/
- package org.apache.hadoop.metadata;
+ package org.apache.hadoop.metadata.typesystem.types;

+ import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.Referenceable;
- import org.apache.hadoop.metadata.typesystem.types.ClassType;
- import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
- import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
...
@@ -16,23 +16,17 @@
* limitations under the License.
*/
- package org.apache.hadoop.metadata;
+ package org.apache.hadoop.metadata.typesystem.types;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
+ import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.typesystem.IReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.ITypedStruct;
import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.Struct;
- import org.apache.hadoop.metadata.typesystem.types.ClassType;
- import org.apache.hadoop.metadata.typesystem.types.DataTypes;
- import org.apache.hadoop.metadata.typesystem.types.EnumType;
- import org.apache.hadoop.metadata.typesystem.types.EnumValue;
- import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
- import org.apache.hadoop.metadata.typesystem.types.StructType;
- import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
...
@@ -16,12 +16,11 @@
* limitations under the License.
*/
- package org.apache.hadoop.metadata;
+ package org.apache.hadoop.metadata.typesystem.types;

+ import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.typesystem.ITypedStruct;
import org.apache.hadoop.metadata.typesystem.Struct;
- import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
- import org.apache.hadoop.metadata.typesystem.types.StructType;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
...
@@ -16,16 +16,13 @@
* limitations under the License.
*/
- package org.apache.hadoop.metadata;
+ package org.apache.hadoop.metadata.typesystem.types;

import com.google.common.collect.ImmutableList;
+ import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.typesystem.IStruct;
import org.apache.hadoop.metadata.typesystem.ITypedStruct;
import org.apache.hadoop.metadata.typesystem.Struct;
- import org.apache.hadoop.metadata.typesystem.types.DataTypes;
- import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
- import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
- import org.apache.hadoop.metadata.typesystem.types.TraitType;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
...
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.typesystem.types;
import com.google.common.collect.ImmutableList;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import scala.actors.threadpool.Arrays;
import java.util.Collections;
import java.util.List;
public class TypeSystemTest extends BaseTest {
@BeforeClass
public void setUp() throws Exception {
super.setup();
}
@AfterMethod
public void tearDown() throws Exception {
getTypeSystem().reset();
}
@Test
public void testGetTypeNames() throws Exception {
getTypeSystem().defineEnumType("enum_test",
new EnumValue("0", 0),
new EnumValue("1", 1),
new EnumValue("2", 2),
new EnumValue("3", 3));
Assert.assertTrue(getTypeSystem().getTypeNames().contains("enum_test"));
}
@Test
public void testIsRegistered() throws Exception {
getTypeSystem().defineEnumType("enum_test",
new EnumValue("0", 0),
new EnumValue("1", 1),
new EnumValue("2", 2),
new EnumValue("3", 3));
Assert.assertTrue(getTypeSystem().isRegistered("enum_test"));
}
@Test
public void testGetTraitsNames() throws Exception {
HierarchicalTypeDefinition<TraitType> classificationTraitDefinition =
TypesUtil.createTraitTypeDef("Classification",
ImmutableList.<String>of(),
TypesUtil.createRequiredAttrDef("tag", DataTypes.STRING_TYPE));
HierarchicalTypeDefinition<TraitType> piiTrait =
TypesUtil.createTraitTypeDef("PII", ImmutableList.<String>of());
HierarchicalTypeDefinition<TraitType> phiTrait =
TypesUtil.createTraitTypeDef("PHI", ImmutableList.<String>of());
HierarchicalTypeDefinition<TraitType> pciTrait =
TypesUtil.createTraitTypeDef("PCI", ImmutableList.<String>of());
HierarchicalTypeDefinition<TraitType> soxTrait =
TypesUtil.createTraitTypeDef("SOX", ImmutableList.<String>of());
HierarchicalTypeDefinition<TraitType> secTrait =
TypesUtil.createTraitTypeDef("SEC", ImmutableList.<String>of());
HierarchicalTypeDefinition<TraitType> financeTrait =
TypesUtil.createTraitTypeDef("Finance", ImmutableList.<String>of());
getTypeSystem().defineTypes(
ImmutableList.<StructTypeDefinition>of(),
ImmutableList.of(classificationTraitDefinition, piiTrait, phiTrait, pciTrait,
soxTrait, secTrait, financeTrait),
ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
final ImmutableList<String> traitsNames = getTypeSystem().getTraitsNames();
Assert.assertEquals(traitsNames.size(), 7);
List traits = Arrays.asList(new String[]{
"Classification",
"PII",
"PHI",
"PCI",
"SOX",
"SEC",
"Finance",
});
Assert.assertFalse(Collections.disjoint(traitsNames, traits));
}
}
@@ -25,7 +25,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
- import org.apache.hadoop.metadata.BaseTest;
+ import org.apache.hadoop.metadata.typesystem.types.BaseTest;
import org.apache.hadoop.metadata.typesystem.TypesDef;
import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes;
...
- /*
+ /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information

@@ -19,7 +19,6 @@
package org.apache.hadoop.metadata.typesystem.json

import com.google.common.collect.ImmutableList
- import org.apache.hadoop.metadata.BaseTest
import org.apache.hadoop.metadata.typesystem.types._
import org.junit.{Assert, Test}
...
@@ -29,6 +29,8 @@ import org.apache.hadoop.metadata.web.service.EmbeddedServer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+ import java.util.Iterator;

/**
* Driver for running Metadata as a standalone server with embedded jetty server.
*/

@@ -60,36 +62,29 @@ public final class Main {
public static void main(String[] args) throws Exception {
CommandLine cmd = parseArgs(args);
- // todo: enable version for webapp
- // String projectVersion = getProjectVersion();
- // String appPath = "webapp/target/metadata-webapp-" + projectVersion;
- String appPath = "webapp/target/metadata-governance";
+ PropertiesConfiguration buildConfiguration =
+         new PropertiesConfiguration("metadata-buildinfo.properties");
+ String appPath = "webapp/target/metadata-webapp-" + getProjectVersion(buildConfiguration);

if (cmd.hasOption(APP_PATH)) {
appPath = cmd.getOptionValue(APP_PATH);
}

- PropertiesConfiguration configuration = new PropertiesConfiguration(
-         "application.properties");
+ PropertiesConfiguration configuration =
+         new PropertiesConfiguration("application.properties");
final String enableTLSFlag = configuration.getString("metadata.enableTLS");
final int appPort = getApplicationPort(cmd, enableTLSFlag);
final boolean enableTLS = isTLSEnabled(enableTLSFlag, appPort);
configuration.setProperty("metadata.enableTLS", String.valueOf(enableTLS));

- LOG.info(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>");
- LOG.info("Server starting with TLS ? {} on port {}", enableTLS, appPort);
- LOG.info("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<");
+ showStartupInfo(buildConfiguration, enableTLS, appPort);

EmbeddedServer server = EmbeddedServer.newServer(appPort, appPath, enableTLS);
server.start();
}

- /*
- private static String getProjectVersion() throws ConfigurationException {
-     PropertiesConfiguration configuration =
-             new PropertiesConfiguration("metadata-buildinfo.properties");
-     return configuration.getString("project.version");
+ private static String getProjectVersion(PropertiesConfiguration buildConfiguration) {
+     return buildConfiguration.getString("project.version");
}
- */

private static int getApplicationPort(CommandLine cmd, String enableTLSFlag) {
final int appPort;

@@ -110,4 +105,29 @@
.getProperty("metadata.enableTLS", (appPort % 1000) == 443 ? "true" : "false")
: enableTLSFlag);
}
private static void showStartupInfo(PropertiesConfiguration buildConfiguration,
boolean enableTLS, int appPort) {
StringBuilder buffer = new StringBuilder();
buffer.append("\n############################################");
buffer.append("############################################");
buffer.append("\n DGI Server (STARTUP)");
buffer.append("\n");
try {
final Iterator<String> keys = buildConfiguration.getKeys();
while (keys.hasNext()) {
String key = keys.next();
buffer.append('\n').append('\t').append(key).
append(":\t").append(buildConfiguration.getProperty(key));
}
} catch (Throwable e) {
buffer.append("*** Unable to get build info ***");
}
buffer.append("\n############################################");
buffer.append("############################################");
LOG.info(buffer.toString());
LOG.info(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>");
LOG.info("Server starting with TLS ? {} on port {}", enableTLS, appPort);
LOG.info("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<");
}
}
@@ -93,7 +93,7 @@ public class EntityResource {
final String guid = metadataService.createEntity(typeName, entity);
JSONObject response = new JSONObject();
response.put(Servlets.REQUEST_ID, Servlets.getRequestId());
- response.put(GUID, guid);
+ response.put(Servlets.RESULTS, guid);

return Response.ok(response).build();
} catch (MetadataException | IOException | IllegalArgumentException e) {

@@ -128,7 +128,7 @@
Response.Status status = Response.Status.NOT_FOUND;
if (entityDefinition != null) {
- response.put("definition", entityDefinition);
+ response.put(Servlets.RESULTS, entityDefinition);
status = Response.Status.OK;
}

@@ -172,7 +172,8 @@
JSONObject response = new JSONObject();
response.put(Servlets.REQUEST_ID, Servlets.getRequestId());
response.put("type", entityType);
- response.put("list", new JSONArray(entityList));
+ response.put(Servlets.RESULTS, new JSONArray(entityList));
+ response.put(Servlets.TOTAL_SIZE, entityList.size());

return Response.ok(response).build();
} catch (MetadataException | IllegalArgumentException e) {

@@ -206,7 +207,8 @@
JSONObject response = new JSONObject();
response.put(Servlets.REQUEST_ID, Servlets.getRequestId());
response.put(GUID, guid);
- response.put("list", new JSONArray(traitNames));
+ response.put(Servlets.RESULTS, new JSONArray(traitNames));
+ response.put(Servlets.TOTAL_SIZE, traitNames.size());

return Response.ok(response).build();
} catch (MetadataException | IllegalArgumentException e) {
...
@@ -56,10 +56,9 @@ import java.util.Map;
@Singleton
public class MetadataDiscoveryResource {

- public static final String RESULTS = "results";
private static final Logger LOG = LoggerFactory.getLogger(EntityResource.class);
private final DiscoveryService discoveryService;
- // public static final String TOTAL_SIZE = "totalSize";

/**
* Created by the Guice ServletModule and injected with the

@@ -90,20 +89,20 @@ public class MetadataDiscoveryResource {
try {
JSONObject response = new JSONObject();
- response.put("requestId", Thread.currentThread().getName());
+ response.put(Servlets.REQUEST_ID, Servlets.getRequestId());
response.put("query", query);

try { // fall back to dsl
final String jsonResult = discoveryService.searchByDSL(query);
response.put("queryType", "dsl");
- response.put(RESULTS, new JSONObject(jsonResult));
+ response.put(Servlets.RESULTS, new JSONObject(jsonResult));
} catch (Throwable throwable) {
LOG.error("Unable to get entity list for query {} using dsl", query, throwable);

// todo: fall back to full text search
response.put("queryType", "full-text");
- response.put(RESULTS, new JSONObject());
+ response.put(Servlets.RESULTS, new JSONObject());
}

return Response.ok(response).build();

@@ -130,10 +129,10 @@
final String jsonResult = discoveryService.searchByDSL(dslQuery);

JSONObject response = new JSONObject();
- response.put("requestId", Thread.currentThread().getName());
+ response.put(Servlets.REQUEST_ID, Servlets.getRequestId());
response.put("query", dslQuery);
response.put("queryType", "dsl");
- response.put(RESULTS, new JSONObject(jsonResult));
+ response.put(Servlets.RESULTS, new JSONObject(jsonResult));

return Response.ok(response).build();
} catch (DiscoveryException e) {

@@ -164,7 +163,7 @@
.searchByGremlin(gremlinQuery);

JSONObject response = new JSONObject();
- response.put("requestId", Thread.currentThread().getName());
+ response.put(Servlets.REQUEST_ID, Servlets.getRequestId());
response.put("query", gremlinQuery);
response.put("queryType", "gremlin");

@@ -172,7 +171,8 @@
for (Map<String, String> result : results) {
list.put(new JSONObject(result));
}
- response.put(RESULTS, list);
+ response.put(Servlets.RESULTS, list);
+ response.put(Servlets.TOTAL_SIZE, list.length());

return Response.ok(response).build();
} catch (DiscoveryException e) {

@@ -213,7 +213,7 @@
.relationshipWalk(guid, depth, edgesToFollow);

try {
- response.put("requestId", Thread.currentThread().getName());
+ response.put(Servlets.REQUEST_ID, Servlets.getRequestId());
if (resultMap.containsKey("vertices")) {
response.put("vertices", new JSONObject(resultMap.get("vertices")));
}

@@ -259,7 +259,7 @@
searchText, depth, prop);

try {
- response.put("requestId", Thread.currentThread().getName());
+ response.put(Servlets.REQUEST_ID, Servlets.getRequestId());
if (resultMap.containsKey("vertices")) {
response.put("vertices", resultMap.get("vertices"));
}
...
@@ -61,8 +61,6 @@ import java.util.Set;
@Path("graph")
@Singleton
public class RexsterGraphResource {
- public static final String RESULTS = "results";
- public static final String TOTAL_SIZE = "totalSize";
public static final String OUT_E = "outE";
public static final String IN_E = "inE";
public static final String BOTH_E = "bothE";

@@ -122,7 +120,7 @@
Vertex vertex = findVertex(vertexId);

JSONObject response = new JSONObject();
- response.put(RESULTS, GraphSONUtility.jsonFromElement(
+ response.put(Servlets.RESULTS, GraphSONUtility.jsonFromElement(
vertex, getVertexIndexedKeys(), GraphSONMode.NORMAL));
return Response.ok(response).build();
} catch (JSONException e) {

@@ -163,8 +161,8 @@
Map<String, String> vertexProperties = getVertexProperties(vertex);

JSONObject response = new JSONObject();
- response.put(RESULTS, new JSONObject(vertexProperties));
- response.put(TOTAL_SIZE, vertexProperties.size());
+ response.put(Servlets.RESULTS, new JSONObject(vertexProperties));
+ response.put(Servlets.TOTAL_SIZE, vertexProperties.size());
return Response.ok(response).build();
} catch (JSONException e) {
throw new WebApplicationException(

@@ -273,9 +271,9 @@
JSONObject response = new JSONObject();
if (!countOnly) {
- response.put(RESULTS, elementArray);
+ response.put(Servlets.RESULTS, elementArray);
}
- response.put(TOTAL_SIZE, counter);
+ response.put(Servlets.TOTAL_SIZE, counter);
return Response.ok(response).build();
}

@@ -301,7 +299,7 @@
}

JSONObject response = new JSONObject();
- response.put(RESULTS, GraphSONUtility.jsonFromElement(
+ response.put(Servlets.RESULTS, GraphSONUtility.jsonFromElement(
edge, getEdgeIndexedKeys(), GraphSONMode.NORMAL));
return Response.ok(response).build();
} catch (JSONException e) {

@@ -321,8 +319,8 @@
}

JSONObject response = new JSONObject();
- response.put(RESULTS, vertexArray);
- response.put(TOTAL_SIZE, counter);
+ response.put(Servlets.RESULTS, vertexArray);
+ response.put(Servlets.TOTAL_SIZE, counter);
return response;
}
...
@@ -84,7 +84,7 @@ public class TypesResource {
JSONObject response = new JSONObject();
response.put("typeName", typeName);
response.put("types", typesAdded);
- response.put("requestId", Thread.currentThread().getName());
+ response.put(Servlets.REQUEST_ID, Servlets.getRequestId());

return Response.ok(response).build();
} catch (Exception e) {

@@ -110,7 +110,7 @@
JSONObject response = new JSONObject();
response.put("typeName", typeName);
response.put("definition", typeDefinition);
- response.put("requestId", Thread.currentThread().getName());
+ response.put(Servlets.REQUEST_ID, Servlets.getRequestId());

return Response.ok(response).build();
} catch (MetadataException e) {

@@ -125,7 +125,7 @@
}

/**
- * Gets the list of types registed in the type system.
+ * Gets the list of type names registered in the type system.
*/
@GET
@Path("list")

@@ -135,8 +135,32 @@
final List<String> typeNamesList = metadataService.getTypeNamesList();

JSONObject response = new JSONObject();
- response.put("list", new JSONArray(typeNamesList));
- response.put("requestId", Thread.currentThread().getName());
+ response.put(Servlets.RESULTS, new JSONArray(typeNamesList));
+ response.put(Servlets.TOTAL_SIZE, typeNamesList.size());
+ response.put(Servlets.REQUEST_ID, Servlets.getRequestId());
return Response.ok(response).build();
} catch (Exception e) {
LOG.error("Unable to get types list", e);
throw new WebApplicationException(
Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
}
}
/**
* Gets the list of trait type names registered in the type system.
*/
@GET
@Path("traits/list")
@Produces(MediaType.APPLICATION_JSON)
public Response getTraitNames(@Context HttpServletRequest request) {
try {
final List<String> traitNamesList = metadataService.getTraitNamesList();
JSONObject response = new JSONObject();
response.put(Servlets.RESULTS, new JSONArray(traitNamesList));
response.put(Servlets.TOTAL_SIZE, traitNamesList.size());
response.put(Servlets.REQUEST_ID, Servlets.getRequestId());
return Response.ok(response).build();
} catch (Exception e) {
...
@@ -34,6 +34,8 @@ import java.io.StringWriter;
public final class Servlets {

public static final String REQUEST_ID = "requestId";
+ public static final String RESULTS = "results";
+ public static final String TOTAL_SIZE = "totalSize";

private Servlets() {
/* singleton */
...
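With RESULTS and TOTAL_SIZE promoted into Servlets alongside REQUEST_ID, the web resources in this commit converge on one response envelope: requestId, results, and (for list responses) totalSize. A minimal, hypothetical helper illustrating that shape follows; the class and method names are not part of the commit.

    import java.util.List;

    import org.apache.hadoop.metadata.web.util.Servlets;
    import org.codehaus.jettison.json.JSONArray;
    import org.codehaus.jettison.json.JSONException;
    import org.codehaus.jettison.json.JSONObject;

    public class ResponseEnvelopeSketch {
        // Produces {"requestId": "...", "results": [...], "totalSize": N},
        // the payload shape returned by the type, entity and discovery resources above.
        static JSONObject listResponse(List<String> names) throws JSONException {
            JSONObject response = new JSONObject();
            response.put(Servlets.REQUEST_ID, Servlets.getRequestId());
            response.put(Servlets.RESULTS, new JSONArray(names));
            response.put(Servlets.TOTAL_SIZE, names.size());
            return response;
        }
    }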
@@ -23,11 +23,11 @@
<meta name="viewport" content="width=device-width, initial-scale=1.0"/>
<meta name="Date-Revision-yyyymmdd" content="20130821"/>
<meta http-equiv="Content-Language" content="en"/>
- <title>Apache Falcon - Data management and processing platform</title>
+ <title>Apache DGI - Data Governance platform</title>
</head>
<body class="topBarEnabled">
- <h1> Apache Metadata Governance</h1>
+ <h1> Apache DGI </h1>
- More information at: <a href="http://dgc.incubator.apache.org/index.html" title="About">Project
+ More information at: <a href="http://dgi.incubator.apache.org/index.html" title="About">Project
Website</a>
</body>
</html>
@@ -23,6 +23,7 @@ import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
+ import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONObject;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;

@@ -68,6 +69,6 @@ public abstract class BaseResourceIT {
JSONObject response = new JSONObject(responseAsString);
Assert.assertEquals(response.get("typeName"), type);
Assert.assertNotNull(response.get("types"));
- Assert.assertNotNull(response.get("requestId"));
+ Assert.assertNotNull(response.get(Servlets.REQUEST_ID));
}
}
@@ -40,6 +40,7 @@ import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
+ import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;

@@ -98,9 +99,9 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
Assert.assertNotNull(responseAsString);

JSONObject response = new JSONObject(responseAsString);
- Assert.assertNotNull(response.get("requestId"));
- guid = response.get("GUID").toString();
+ Assert.assertNotNull(response.get(Servlets.REQUEST_ID));
+ guid = response.get(Servlets.RESULTS).toString();
Assert.assertNotNull(guid);

try {

@@ -126,9 +127,9 @@
Assert.assertNotNull(responseAsString);

JSONObject response = new JSONObject(responseAsString);
- Assert.assertNotNull(response.get("requestId"));
- final String definition = response.getString("definition");
+ Assert.assertNotNull(response.get(Servlets.REQUEST_ID));
+ final String definition = response.getString(Servlets.RESULTS);
Assert.assertNotNull(definition);
LOG.debug("tableInstanceAfterGet = " + definition);

@@ -192,9 +193,9 @@
Assert.assertNotNull(responseAsString);

JSONObject response = new JSONObject(responseAsString);
- Assert.assertNotNull(response.get("requestId"));
- final JSONArray list = response.getJSONArray("list");
+ Assert.assertNotNull(response.get(Servlets.REQUEST_ID));
+ final JSONArray list = response.getJSONArray(Servlets.RESULTS);
Assert.assertNotNull(list);
Assert.assertEquals(list.length(), 1);
}

@@ -228,9 +229,9 @@
Assert.assertNotNull(responseAsString);

JSONObject response = new JSONObject(responseAsString);
- Assert.assertNotNull(response.get("requestId"));
- final JSONArray list = response.getJSONArray("list");
+ Assert.assertNotNull(response.get(Servlets.REQUEST_ID));
+ final JSONArray list = response.getJSONArray(Servlets.RESULTS);
Assert.assertEquals(list.length(), 0);
}

@@ -259,10 +260,10 @@
Assert.assertNotNull(responseAsString);

JSONObject response = new JSONObject(responseAsString);
- Assert.assertNotNull(response.get("requestId"));
+ Assert.assertNotNull(response.get(Servlets.REQUEST_ID));
Assert.assertNotNull(response.get("GUID"));
- final JSONArray list = response.getJSONArray("list");
+ final JSONArray list = response.getJSONArray(Servlets.RESULTS);
Assert.assertEquals(list.length(), 7);
}

@@ -294,7 +295,7 @@
Assert.assertNotNull(responseAsString);

JSONObject response = new JSONObject(responseAsString);
- Assert.assertNotNull(response.get("requestId"));
+ Assert.assertNotNull(response.get(Servlets.REQUEST_ID));
Assert.assertNotNull(response.get("GUID"));
Assert.assertNotNull(response.get("traitInstance"));
}

@@ -341,7 +342,7 @@
Assert.assertNotNull(responseAsString);

JSONObject response = new JSONObject(responseAsString);
- Assert.assertNotNull(response.get("requestId"));
+ Assert.assertNotNull(response.get(Servlets.REQUEST_ID));
Assert.assertNotNull(response.get("GUID"));
Assert.assertNotNull(response.get("traitName"));
}
...
@@ -33,6 +33,7 @@ import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
+ import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;

@@ -77,12 +78,12 @@ public class MetadataDiscoveryResourceIT extends BaseResourceIT {
Assert.assertNotNull(responseAsString);

JSONObject response = new JSONObject(responseAsString);
- Assert.assertNotNull(response.get("requestId"));
+ Assert.assertNotNull(response.get(Servlets.REQUEST_ID));

Assert.assertEquals(response.getString("query"), dslQuery);
Assert.assertEquals(response.getString("queryType"), "dsl");

- JSONObject results = response.getJSONObject("results");
+ JSONObject results = response.getJSONObject(Servlets.RESULTS);
Assert.assertNotNull(results);

JSONArray rows = results.getJSONArray("rows");

@@ -121,7 +122,7 @@
Assert.assertNotNull(responseAsString);

JSONObject response = new JSONObject(responseAsString);
- Assert.assertNotNull(response.get("requestId"));
+ Assert.assertNotNull(response.get(Servlets.REQUEST_ID));

Assert.assertEquals(response.getString("query"), query);
Assert.assertEquals(response.getString("queryType"), "gremlin");

@@ -144,7 +145,7 @@
Assert.assertNotNull(responseAsString);

JSONObject response = new JSONObject(responseAsString);
- Assert.assertNotNull(response.get("requestId"));
+ Assert.assertNotNull(response.get(Servlets.REQUEST_ID));

Assert.assertEquals(response.getString("query"), query);
Assert.assertEquals(response.getString("queryType"), "dsl");

@@ -252,9 +253,9 @@
Assert.assertNotNull(responseAsString);

JSONObject response = new JSONObject(responseAsString);
- Assert.assertNotNull(response.get("requestId"));
+ Assert.assertNotNull(response.get(Servlets.REQUEST_ID));

- String guid = response.get("GUID").toString();
+ String guid = response.get(Servlets.RESULTS).toString();
Assert.assertNotNull(guid);
}
...
@@ -22,6 +22,7 @@
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import org.apache.hadoop.metadata.typesystem.json.TypesSerialization;
+ import org.apache.hadoop.metadata.typesystem.json.TypesSerialization$;
import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.DataTypes;

@@ -29,6 +30,7 @@ import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
import org.apache.hadoop.metadata.typesystem.types.TraitType;
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
+ import org.apache.hadoop.metadata.web.util.Servlets;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;
import org.testng.Assert;

@@ -83,7 +85,7 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
JSONObject response = new JSONObject(responseAsString);
Assert.assertEquals(response.get("typeName"), typeDefinition.typeName);
Assert.assertNotNull(response.get("types"));
- Assert.assertNotNull(response.get("requestId"));
+ Assert.assertNotNull(response.get(Servlets.REQUEST_ID));
}
}

@@ -108,7 +110,7 @@
JSONObject response = new JSONObject(responseAsString);
Assert.assertEquals(response.get("typeName"), typeDefinition.typeName);
Assert.assertNotNull(response.get("definition"));
- Assert.assertNotNull(response.get("requestId"));
+ Assert.assertNotNull(response.get(Servlets.REQUEST_ID));
}
}

@@ -140,12 +142,56 @@
Assert.assertNotNull(responseAsString);

JSONObject response = new JSONObject(responseAsString);
- Assert.assertNotNull(response.get("requestId"));
+ Assert.assertNotNull(response.get(Servlets.REQUEST_ID));

- final JSONArray list = response.getJSONArray("list");
+ final JSONArray list = response.getJSONArray(Servlets.RESULTS);
Assert.assertNotNull(list);
}
@Test
public void testGetTraitNames() throws Exception {
String[] traitsAdded = addTraits();
WebResource resource = service
.path("api/metadata/types/traits/list");
ClientResponse clientResponse = resource
.accept(MediaType.APPLICATION_JSON)
.type(MediaType.APPLICATION_JSON)
.method(HttpMethod.GET, ClientResponse.class);
Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
String responseAsString = clientResponse.getEntity(String.class);
Assert.assertNotNull(responseAsString);
JSONObject response = new JSONObject(responseAsString);
Assert.assertNotNull(response.get(Servlets.REQUEST_ID));
final JSONArray list = response.getJSONArray(Servlets.RESULTS);
Assert.assertNotNull(list);
Assert.assertTrue(list.length() >= traitsAdded.length);
}
private String[] addTraits() throws Exception {
String[] traitNames = {
"class_trait",
"secure_trait",
"pii_trait",
"ssn_trait",
"salary_trait",
"sox_trait",
};
for (String traitName : traitNames) {
HierarchicalTypeDefinition<TraitType> traitTypeDef =
TypesUtil.createTraitTypeDef(traitName, ImmutableList.<String>of());
String json = TypesSerialization$.MODULE$.toJson(traitTypeDef, true);
sumbitType(json, traitName);
}
return traitNames;
}
private List<HierarchicalTypeDefinition> createHiveTypes() throws Exception {
ArrayList<HierarchicalTypeDefinition> typeDefinitions = new ArrayList<>();
...