Commit 6a9193c7 by Suma Shivaprasad

Merged with master

parents b5bf5cd9 e4385af6
@@ -35,19 +35,22 @@
<properties>
<hive.version>1.1.0</hive.version>
<calcite.version>0.9.2-incubating</calcite.version>
<hadoop.version>2.5.0</hadoop.version>
<hadoop.version>2.6.0</hadoop.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-client</artifactId>
<version>${version}</version>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
<scope>runtime</scope>
<type>test-jar</type>
</dependency>
<dependency>
@@ -55,6 +58,13 @@
<artifactId>metadata-typesystem</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minikdc</artifactId>
<version>${hadoop.version}</version>
<scope>test</scope>
</dependency>
<!-- Logging -->
<dependency>
<groupId>org.slf4j</groupId>
@@ -92,7 +102,12 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
@@ -104,9 +119,15 @@
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-webapp</artifactId>
<version>${project.version}</version>
<type>war</type>
<classifier>classes</classifier>
</dependency>
<dependency>
<groupId>org.mortbay.jetty</groupId>
<artifactId>jetty</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
@@ -254,6 +275,12 @@
<skip>false</skip>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.felix</groupId>
<artifactId>maven-bundle-plugin</artifactId>
<inherited>true</inherited>
<extensions>true</extensions>
</plugin>
</plugins>
</build>
</project>
@@ -59,9 +59,11 @@ for i in "${BASEDIR}/bridge/hive/"*.jar; do
METADATACPPATH="${METADATACPPATH}:$i"
done
echo $METADATACPPATH
# log dir for applications
METADATA_LOG_DIR="${METADATA_LOG_DIR:-$BASEDIR/logs}"
export METADATA_LOG_DIR
JAVA_PROPERTIES="$METADATA_OPTS"
JAVA_PROPERTIES="$METADATA_OPTS -Dmetadata.log.dir=$METADATA_LOG_DIR -Dmetadata.log.file=import-hive.log"
shift
while [[ ${1} =~ ^\-D ]]; do
@@ -70,6 +72,7 @@ while [[ ${1} =~ ^\-D ]]; do
done
TIME=`date +%Y%m%d%H%M%s`
#Add hive conf in classpath
if [ ! -z "$HIVE_CONF_DIR" ]; then
HIVE_CP=$HIVE_CONF_DIR
elif [ ! -z "$HIVE_HOME" ]; then
@@ -86,5 +89,5 @@ echo Using Hive configuration directory [$HIVE_CP]
${JAVA_BIN} ${JAVA_PROPERTIES} -cp ${HIVE_CP}:${METADATACPPATH} org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge
RETVAL=$?
[ $RETVAL -eq 0 ] && echo Hive Data Model Imported!!!
[ $RETVAL -ne 0 ] && echo Failure in Hive Data Model import!!!
[ $RETVAL -eq 0 ] && echo Hive Data Model imported successfully!!!
[ $RETVAL -ne 0 ] && echo Failed to import Hive Data Model!!!
@@ -18,6 +18,7 @@
package org.apache.hadoop.metadata.hive.bridge;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -31,9 +32,15 @@ import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.hive.model.HiveDataModelGenerator;
import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.Struct;
import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;
import org.apache.hadoop.metadata.typesystem.json.Serialization;
import org.apache.hadoop.metadata.typesystem.persistence.Id;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -97,22 +104,48 @@ public class HiveMetaStoreBridge {
}
}
public Referenceable registerDatabase(String databaseName) throws Exception {
LOG.info("Importing objects from databaseName : " + databaseName);
Database hiveDB = hiveClient.getDatabase(databaseName);
Referenceable dbRef = new Referenceable(HiveDataTypes.HIVE_DB.getName());
dbRef.set("name", hiveDB.getName());
dbRef.set("description", hiveDB.getDescription());
dbRef.set("locationUri", hiveDB.getLocationUri());
dbRef.set("parameters", hiveDB.getParameters());
dbRef.set("ownerName", hiveDB.getOwnerName());
if (hiveDB.getOwnerType() != null) {
dbRef.set("ownerType", hiveDB.getOwnerType().getValue());
/**
* Gets reference for the database
*
* @param dbName database name
* @return Reference to the database if it exists, else null
* @throws Exception
*/
private Referenceable getDatabaseReference(String dbName) throws Exception {
LOG.debug("Getting reference for database {}", dbName);
String typeName = HiveDataTypes.HIVE_DB.getName();
MetadataServiceClient dgiClient = getMetadataServiceClient();
JSONArray results = dgiClient.rawSearch(typeName, "name", dbName);
if (results.length() == 0) {
return null;
} else {
String guid = getGuidFromDSLResponse(results.getJSONObject(0));
return new Referenceable(guid, typeName, null);
}
}
public Referenceable registerDatabase(String databaseName) throws Exception {
Referenceable dbRef = getDatabaseReference(databaseName);
if (dbRef == null) {
LOG.info("Importing objects from databaseName : " + databaseName);
Database hiveDB = hiveClient.getDatabase(databaseName);
dbRef = new Referenceable(HiveDataTypes.HIVE_DB.getName());
dbRef.set("name", hiveDB.getName());
dbRef.set("description", hiveDB.getDescription());
dbRef.set("locationUri", hiveDB.getLocationUri());
dbRef.set("parameters", hiveDB.getParameters());
dbRef.set("ownerName", hiveDB.getOwnerName());
if (hiveDB.getOwnerType() != null) {
dbRef.set("ownerType", hiveDB.getOwnerType().getValue());
}
return createInstance(dbRef);
dbRef = createInstance(dbRef);
} else {
LOG.info("Database {} is already registered with id {}", databaseName, dbRef.getId().id);
}
return dbRef;
}
public Referenceable createInstance(Referenceable referenceable) throws Exception {
@@ -132,81 +165,135 @@ public class HiveMetaStoreBridge {
List<String> hiveTables = hiveClient.getAllTables(databaseName);
for (String tableName : hiveTables) {
Pair<Referenceable, Referenceable> tableReferenceable = registerTable(databaseReferenceable, databaseName, tableName);
Referenceable tableReferenceable = registerTable(databaseReferenceable, databaseName, tableName);
// Import Partitions
importPartitions(databaseName, tableName, databaseReferenceable, tableReferenceable.first, tableReferenceable.second);
Referenceable sdReferenceable = getSDForTable(databaseReferenceable, tableName);
importPartitions(databaseName, tableName, databaseReferenceable, tableReferenceable, sdReferenceable);
// Import Indexes
importIndexes(databaseName, tableName, databaseReferenceable, tableReferenceable.first);
importIndexes(databaseName, tableName, databaseReferenceable, tableReferenceable);
}
}
public Pair<Referenceable, Referenceable> registerTable(Referenceable dbReference, String dbName, String tableName) throws Exception {
LOG.info("Importing objects from " + dbName + "." + tableName);
Table hiveTable = hiveClient.getTable(dbName, tableName);
/**
* Gets reference for the table
*
* @param dbRef database reference
* @param tableName table name
* @return table reference if it exists, else null
* @throws Exception
*/
private Referenceable getTableReference(Referenceable dbRef, String tableName) throws Exception {
LOG.debug("Getting reference for table {}.{}", dbRef, tableName);
Referenceable tableRef = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());
tableRef.set("tableName", hiveTable.getTableName());
tableRef.set("owner", hiveTable.getOwner());
//todo fix
tableRef.set("createTime", hiveTable.getLastAccessTime());
tableRef.set("lastAccessTime", hiveTable.getLastAccessTime());
tableRef.set("retention", hiveTable.getRetention());
// add reference to the database
tableRef.set("dbName", dbReference);
// add reference to the StorageDescriptor
StorageDescriptor storageDesc = hiveTable.getSd();
Referenceable sdReferenceable = fillStorageDescStruct(storageDesc);
tableRef.set("sd", sdReferenceable);
// add reference to the Partition Keys
List<Referenceable> partKeys = new ArrayList<>();
Referenceable colRef;
if (hiveTable.getPartitionKeys().size() > 0) {
for (FieldSchema fs : hiveTable.getPartitionKeys()) {
colRef = new Referenceable(HiveDataTypes.HIVE_COLUMN.getName());
colRef.set("name", fs.getName());
colRef.set("type", fs.getType());
colRef.set("comment", fs.getComment());
Referenceable colRefTyped = createInstance(colRef);
partKeys.add(colRefTyped);
}
String typeName = HiveDataTypes.HIVE_TABLE.getName();
MetadataServiceClient dgiClient = getMetadataServiceClient();
tableRef.set("partitionKeys", partKeys);
//todo DSL support for reference doesn't work. is the usage right?
// String query = String.format("%s where dbName = \"%s\" and tableName = \"%s\"", typeName, dbRef.getId().id,
// tableName);
String query = String.format("%s where name = \"%s\"", typeName, tableName);
JSONArray results = dgiClient.searchByDSL(query);
if (results.length() == 0) {
return null;
} else {
//There should be just one instance with the given name
String guid = getGuidFromDSLResponse(results.getJSONObject(0));
LOG.debug("Got reference for table {}.{} = {}", dbRef, tableName, guid);
return new Referenceable(guid, typeName, null);
}
}
tableRef.set("parameters", hiveTable.getParameters());
private String getGuidFromDSLResponse(JSONObject jsonObject) throws JSONException {
return jsonObject.getJSONObject("$id$").getString("id");
}
if (hiveTable.getViewOriginalText() != null) {
tableRef.set("viewOriginalText", hiveTable.getViewOriginalText());
private Referenceable getSDForTable(Referenceable dbRef, String tableName) throws Exception {
Referenceable tableRef = getTableReference(dbRef, tableName);
if (tableRef == null) {
throw new IllegalArgumentException("Table " + dbRef + "." + tableName + " doesn't exist");
}
if (hiveTable.getViewExpandedText() != null) {
tableRef.set("viewExpandedText", hiveTable.getViewExpandedText());
}
MetadataServiceClient dgiClient = getMetadataServiceClient();
Referenceable tableInstance = dgiClient.getEntity(tableRef.getId().id);
Id sdId = (Id) tableInstance.get("sd");
return new Referenceable(sdId.id, sdId.getTypeName(), null);
}
public Referenceable registerTable(String dbName, String tableName) throws Exception {
Referenceable dbReferenceable = registerDatabase(dbName);
return registerTable(dbReferenceable, dbName, tableName);
}
tableRef.set("tableType", hiveTable.getTableType());
tableRef.set("temporary", hiveTable.isTemporary());
public Referenceable registerTable(Referenceable dbReference, String dbName, String tableName) throws Exception {
LOG.info("Attempting to register table [" + tableName + "]");
Referenceable tableRef = getTableReference(dbReference, tableName);
if (tableRef == null) {
LOG.info("Importing objects from " + dbName + "." + tableName);
Table hiveTable = hiveClient.getTable(dbName, tableName);
tableRef = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());
tableRef.set("name", hiveTable.getTableName());
tableRef.set("owner", hiveTable.getOwner());
//todo fix
tableRef.set("createTime", hiveTable.getLastAccessTime());
tableRef.set("lastAccessTime", hiveTable.getLastAccessTime());
tableRef.set("retention", hiveTable.getRetention());
// add reference to the database
tableRef.set("dbName", dbReference);
// add reference to the StorageDescriptor
StorageDescriptor storageDesc = hiveTable.getSd();
Referenceable sdReferenceable = fillStorageDescStruct(storageDesc);
tableRef.set("sd", sdReferenceable);
// add reference to the Partition Keys
List<Referenceable> partKeys = new ArrayList<>();
Referenceable colRef;
if (hiveTable.getPartitionKeys().size() > 0) {
for (FieldSchema fs : hiveTable.getPartitionKeys()) {
colRef = new Referenceable(HiveDataTypes.HIVE_COLUMN.getName());
colRef.set("name", fs.getName());
colRef.set("type", fs.getType());
colRef.set("comment", fs.getComment());
Referenceable colRefTyped = createInstance(colRef);
partKeys.add(colRefTyped);
}
tableRef.set("partitionKeys", partKeys);
}
// List<Referenceable> fieldsList = getColumns(storageDesc);
// tableRef.set("columns", fieldsList);
tableRef.set("parameters", hiveTable.getParameters());
Referenceable tableReferenceable = createInstance(tableRef);
return Pair.of(tableReferenceable, sdReferenceable);
if (hiveTable.getViewOriginalText() != null) {
tableRef.set("viewOriginalText", hiveTable.getViewOriginalText());
}
if (hiveTable.getViewExpandedText() != null) {
tableRef.set("viewExpandedText", hiveTable.getViewExpandedText());
}
tableRef.set("tableType", hiveTable.getTableType());
tableRef.set("temporary", hiveTable.isTemporary());
List<Referenceable> colList = getColumns(hiveTable.getAllCols());
tableRef.set("columns", colList);
tableRef = createInstance(tableRef);
} else {
LOG.info("Table {}.{} is already registered with id {}", dbName, tableName, tableRef.getId().id);
}
return tableRef;
}
private void importPartitions(String db, String tableName,
Referenceable dbReferenceable,
Referenceable tableReferenceable,
Referenceable sdReferenceable) throws Exception {
Table table = new Table();
table.setDbName(db);
table.setTableName(tableName);
Set<Partition> tableParts = hiveClient.getAllPartitionsOf(table);
Set<Partition> tableParts = hiveClient.getAllPartitionsOf(new Table(Table.getEmptyTable(db, tableName)));
if (tableParts.size() > 0) {
for (Partition hivePart : tableParts) {
@@ -215,10 +302,13 @@ public class HiveMetaStoreBridge {
}
}
//todo should be idempotent
private Referenceable importPartition(Partition hivePart,
Referenceable dbReferenceable,
Referenceable tableReferenceable,
Referenceable sdReferenceable) throws Exception {
LOG.info("Importing partition for {}.{} with values {}", dbReferenceable, tableReferenceable,
StringUtils.join(hivePart.getValues(), ","));
Referenceable partRef = new Referenceable(HiveDataTypes.HIVE_PARTITION.getName());
partRef.set("values", hivePart.getValues());
@@ -250,9 +340,11 @@ public class HiveMetaStoreBridge {
}
}
//todo should be idempotent
private void importIndex(Index index,
Referenceable dbReferenceable,
Referenceable tableReferenceable) throws Exception {
LOG.info("Importing index {} for {}.{}", index.getIndexName(), dbReferenceable, tableReferenceable);
Referenceable indexRef = new Referenceable(HiveDataTypes.HIVE_INDEX.getName());
indexRef.set("indexName", index.getIndexName());
@@ -310,7 +402,7 @@ public class HiveMetaStoreBridge {
}
*/
List<Referenceable> fieldsList = getColumns(storageDesc);
List<Referenceable> fieldsList = getColumns(storageDesc.getCols());
sdReferenceable.set("cols", fieldsList);
List<Struct> sortColsStruct = new ArrayList<>();
@@ -341,19 +433,19 @@ public class HiveMetaStoreBridge {
return createInstance(sdReferenceable);
}
private List<Referenceable> getColumns(StorageDescriptor storageDesc) throws Exception {
List<Referenceable> fieldsList = new ArrayList<>();
Referenceable colReferenceable;
for (FieldSchema fs : storageDesc.getCols()) {
private List<Referenceable> getColumns(List<FieldSchema> schemaList) throws Exception {
List<Referenceable> colList = new ArrayList<>();
for (FieldSchema fs : schemaList) {
LOG.debug("Processing field " + fs);
colReferenceable = new Referenceable(HiveDataTypes.HIVE_COLUMN.getName());
Referenceable colReferenceable = new Referenceable(HiveDataTypes.HIVE_COLUMN.getName());
colReferenceable.set("name", fs.getName());
colReferenceable.set("type", fs.getType());
colReferenceable.set("comment", fs.getComment());
fieldsList.add(createInstance(colReferenceable));
colList.add(createInstance(colReferenceable));
}
return fieldsList;
return colList;
}
public synchronized void registerHiveDataModel() throws Exception {
@@ -362,9 +454,10 @@ public class HiveMetaStoreBridge {
//Register hive data model if its not already registered
if (dgiClient.getType(HiveDataTypes.HIVE_PROCESS.getName()) == null ) {
LOG.info("Registering Hive data model");
dgiClient.createType(dataModelGenerator.getModelAsJson());
} else {
LOG.debug("Hive data model is already registered!");
LOG.info("Hive data model is already registered!");
}
}
......
@@ -207,7 +207,7 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHoo
Table table = entity.getTable();
//TODO table.getDbName().toLowerCase() is required as hive stores in lowercase,
// but table.getDbName() is not lowercase
Referenceable dbReferenceable = getDatabaseReference(dgiBridge, table.getDbName().toLowerCase());
Referenceable dbReferenceable = dgiBridge.registerDatabase(table.getDbName().toLowerCase());
dgiBridge.registerTable(dbReferenceable, table.getDbName(), table.getTableName());
}
}
@@ -230,7 +230,8 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHoo
LOG.info("Explain statement. Skipping...");
}
String user = hookContext.getUserName();
//todo hookContext.getUserName() is null in hdp sandbox 2.2.4
String user = hookContext.getUserName() == null ? System.getProperty("user.name") : hookContext.getUserName();
HiveOperation operation = HiveOperation.valueOf(hookContext.getOperationName());
String queryId = null;
String queryStr = null;
@@ -245,7 +246,7 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHoo
LOG.debug("Registering CTAS query: {}", queryStr);
Referenceable processReferenceable = new Referenceable(HiveDataTypes.HIVE_PROCESS.getName());
processReferenceable.set("processName", operation.getOperationName());
processReferenceable.set("name", operation.getOperationName());
processReferenceable.set("startTime", queryStartTime);
processReferenceable.set("userName", user);
List<Referenceable> source = new ArrayList<>();
@@ -253,19 +254,19 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHoo
if (readEntity.getTyp() == Entity.Type.TABLE) {
Table table = readEntity.getTable();
String dbName = table.getDbName().toLowerCase();
source.add(getTableReference(dgiBridge, dbName, table.getTableName()));
source.add(dgiBridge.registerTable(dbName, table.getTableName()));
}
}
processReferenceable.set("sourceTableNames", source);
processReferenceable.set("inputTables", source);
List<Referenceable> target = new ArrayList<>();
for (WriteEntity writeEntity : outputs) {
if (writeEntity.getTyp() == Entity.Type.TABLE) {
Table table = writeEntity.getTable();
String dbName = table.getDbName().toLowerCase();
target.add(getTableReference(dgiBridge, dbName, table.getTableName()));
target.add(dgiBridge.registerTable(dbName, table.getTableName()));
}
}
processReferenceable.set("targetTableNames", target);
processReferenceable.set("outputTables", target);
processReferenceable.set("queryText", queryStr);
processReferenceable.set("queryId", queryId);
processReferenceable.set("queryPlan", getQueryPlan(hookContext, conf));
@@ -276,58 +277,6 @@ public class HiveHook implements ExecuteWithHookContext, HiveSemanticAnalyzerHoo
dgiBridge.createInstance(processReferenceable);
}
/**
* Gets reference for the database. Creates new instance if it doesn't exist
*
* @param dgiBridge
* @param dbName database name
* @return Reference for database
* @throws Exception
*/
private Referenceable getDatabaseReference(HiveMetaStoreBridge dgiBridge, String dbName) throws Exception {
String typeName = HiveDataTypes.HIVE_DB.getName();
MetadataServiceClient dgiClient = dgiBridge.getMetadataServiceClient();
JSONObject result = dgiClient.rawSearch(typeName, "name", dbName);
JSONArray results = (JSONArray) result.get("results");
if (results.length() == 0) {
//Create new instance
return dgiBridge.registerDatabase(dbName);
} else {
String guid = (String) ((JSONObject) results.get(0)).get("guid");
return new Referenceable(guid, typeName, null);
}
}
/**
* Gets reference for the table. Creates new instance if it doesn't exist
*
* @param dgiBridge
* @param dbName
* @param tableName table name
* @return table reference
* @throws Exception
*/
private Referenceable getTableReference(HiveMetaStoreBridge dgiBridge, String dbName, String tableName) throws Exception {
String typeName = HiveDataTypes.HIVE_TABLE.getName();
MetadataServiceClient dgiClient = dgiBridge.getMetadataServiceClient();
JSONObject result = dgiClient.rawSearch(typeName, "tableName", tableName);
JSONArray results = (JSONArray) result.get("results");
if (results.length() == 0) {
Referenceable dbRererence = getDatabaseReference(dgiBridge, dbName);
return dgiBridge.registerTable(dbRererence, dbName, tableName).first;
} else {
//There should be just one instance with the given name
String guid = (String) ((JSONObject) results.get(0)).get("guid");
return new Referenceable(guid, typeName, null);
}
}
private String getQueryPlan(HookContext hookContext, HiveConf conf) throws Exception {
//We need to somehow get the sem associated with the plan and use it here.
......
@@ -340,8 +340,8 @@ public class HiveDataModelGenerator {
private void createPartitionClass() throws MetadataException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("values", DataTypes.STRING_TYPE.getName(),
Multiplicity.COLLECTION, false, null),
new AttributeDefinition("values", DataTypes.arrayTypeName(DataTypes.STRING_TYPE.getName()),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("dbName", HiveDataTypes.HIVE_DB.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("tableName", HiveDataTypes.HIVE_TABLE.getName(),
@@ -354,10 +354,9 @@ public class HiveDataModelGenerator {
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("columns",
DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
Multiplicity.COLLECTION, true, null),
Multiplicity.OPTIONAL, true, null),
new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
};
HierarchicalTypeDefinition<ClassType> definition =
new HierarchicalTypeDefinition<>(ClassType.class,
@@ -368,7 +367,7 @@ public class HiveDataModelGenerator {
private void createTableClass() throws MetadataException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("tableName", DataTypes.STRING_TYPE.getName(),
new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("dbName", HiveDataTypes.HIVE_DB.getName(),
Multiplicity.REQUIRED, false, null),
@@ -385,9 +384,9 @@ public class HiveDataModelGenerator {
new AttributeDefinition("partitionKeys",
DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
Multiplicity.OPTIONAL, false, null),
// new AttributeDefinition("columns",
// DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
// Multiplicity.COLLECTION, true, null),
new AttributeDefinition("columns",
DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
Multiplicity.OPTIONAL, true, null),
new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("viewOriginalText", DataTypes.STRING_TYPE.getName(),
@@ -481,7 +480,7 @@ public class HiveDataModelGenerator {
private void createProcessClass() throws MetadataException {
AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
new AttributeDefinition("processName", DataTypes.STRING_TYPE.getName(),
new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
new AttributeDefinition("startTime", DataTypes.INT_TYPE.getName(),
Multiplicity.REQUIRED, false, null),
......
@@ -7,7 +7,7 @@ Hive metadata can be modelled in DGI using its Type System. The default modellin
* hive_order(StructType) - [col, order]
* hive_resourceuri(StructType) - [resourceType, uri]
* hive_serde(StructType) - [name, serializationLib, parameters]
* hive_process(ClassType) - [processName, startTime, endTime, userName, sourceTableNames, targetTableNames, queryText, queryPlan, queryId, queryGraph]
* hive_process(ClassType) - [name, startTime, endTime, userName, sourceTableNames, targetTableNames, queryText, queryPlan, queryId, queryGraph]
* hive_function(ClassType) - [functionName, dbName, className, ownerName, ownerType, createTime, functionType, resourceUris]
* hive_type(ClassType) - [name, type1, type2, fields]
* hive_partition(ClassType) - [values, dbName, tableName, createTime, lastAccessTime, sd, parameters]
@@ -16,13 +16,13 @@ Hive metadata can be modelled in DGI using its Type System. The default modellin
* hive_role(ClassType) - [roleName, createTime, ownerName]
* hive_column(ClassType) - [name, type, comment]
* hive_db(ClassType) - [name, description, locationUri, parameters, ownerName, ownerType]
* hive_table(ClassType) - [tableName, dbName, owner, createTime, lastAccessTime, retention, sd, partitionKeys, parameters, viewOriginalText, viewExpandedText, tableType, temporary]
* hive_table(ClassType) - [name, dbName, owner, createTime, lastAccessTime, retention, sd, partitionKeys, columns, parameters, viewOriginalText, viewExpandedText, tableType, temporary]
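Once registered, instances of these types can be looked up through the DGI search API. For illustration, a DSL query of the same form the bridge itself uses to look up a table (the table name below is just a placeholder) is:
<verbatim>
hive_table where name = "sales_fact"
</verbatim>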
---++ Importing Hive Metadata
org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge imports Hive metadata into DGI using the typesystem defined in org.apache.hadoop.metadata.hive.model.HiveDataModelGenerator. The import-hive.sh command can be used to facilitate this.
Set-up the following configs in <dgi package>/conf/hive-site.xml:
* Hive metastore configuration - Refer [[https://cwiki.apache.org/confluence/display/Hive/AdminManual+MetastoreAdmin][Hive Metastore Configuration documentation]]
Set up the following configs in the hive-site.xml of your Hive setup and set the environment variable HIVE_CONF_DIR to the
Hive conf directory:
* DGI endpoint - Add the following property with the DGI endpoint for your set-up
<verbatim>
<property>
@@ -31,7 +31,7 @@ Set-up the following configs in <dgi package>/conf/hive-site.xml:
</property>
</verbatim>
Usage: <dgi package>/bin/import-hive.sh
Usage: <dgi package>/bin/import-hive.sh. The logs are in <dgi package>/logs/import-hive.log
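For example, assuming the Hive client configuration lives under /etc/hive/conf (an illustrative path; the script falls back to HIVE_HOME when HIVE_CONF_DIR is not set):
<verbatim>
export HIVE_CONF_DIR=/etc/hive/conf
<dgi package>/bin/import-hive.sh
</verbatim>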
---++ Hive Hook
@@ -57,4 +57,5 @@ The following properties in hive-site.xml control the thread pool details:
* hive.hook.dgi.minThreads - core number of threads. default 5
* hive.hook.dgi.maxThreads - maximum number of threads. default 5
* hive.hook.dgi.keepAliveTime - keep alive time in msecs. default 10
* hive.hook.dgi.synchronous - boolean, true to run the hook synchronously. default false
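As an illustration, these properties are set in hive-site.xml like any other Hive property; the values below simply restate the defaults listed above:
<verbatim>
<property>
  <name>hive.hook.dgi.synchronous</name>
  <value>false</value>
</property>
<property>
  <name>hive.hook.dgi.maxThreads</name>
  <value>5</value>
</property>
</verbatim>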
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.hive.hook;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge;
import org.apache.hadoop.metadata.security.BaseSecurityTest;
import org.apache.hadoop.metadata.web.service.SecureEmbeddedServer;
import org.apache.hadoop.security.alias.CredentialProvider;
import org.apache.hadoop.security.alias.CredentialProviderFactory;
import org.mortbay.jetty.Server;
import java.io.File;
import java.io.IOException;
import static org.apache.hadoop.metadata.security.SecurityProperties.KEYSTORE_PASSWORD_KEY;
import static org.apache.hadoop.metadata.security.SecurityProperties.SERVER_CERT_PASSWORD_KEY;
import static org.apache.hadoop.metadata.security.SecurityProperties.TRUSTSTORE_PASSWORD_KEY;
/**
* Base test class that sets up the KDC, credentials and secure embedded server used by the SSL/Kerberos Hive hook tests.
*/
public class BaseSSLAndKerberosTest extends BaseSecurityTest {
public static final String TESTUSER = "testuser";
public static final String TESTPASS = "testpass";
protected static final String DGI_URL = "https://localhost:21443/";
protected Path jksPath;
protected String providerUrl;
protected File httpKeytabFile;
private File userKeytabFile;
class TestSecureEmbeddedServer extends SecureEmbeddedServer {
public TestSecureEmbeddedServer(int port, String path) throws IOException {
super(port, path);
}
public Server getServer() {
return server;
}
@Override
public PropertiesConfiguration getConfiguration() {
return super.getConfiguration();
}
}
protected void setupCredentials() throws Exception {
Configuration conf = new Configuration(false);
File file = new File(jksPath.toUri().getPath());
file.delete();
conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl);
CredentialProvider provider =
CredentialProviderFactory.getProviders(conf).get(0);
// create new aliases
try {
char[] storepass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
KEYSTORE_PASSWORD_KEY, storepass);
char[] trustpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
TRUSTSTORE_PASSWORD_KEY, trustpass);
char[] trustpass2 = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
"ssl.client.truststore.password", trustpass2);
char[] certpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
SERVER_CERT_PASSWORD_KEY, certpass);
// write out so that it can be found in checks
provider.flush();
} catch (Exception e) {
e.printStackTrace();
throw e;
}
}
public void setupKDCAndPrincipals() throws Exception {
// set up the KDC
File kdcWorkDir = startKDC();
userKeytabFile = createKeytab(kdc, kdcWorkDir, "dgi", "dgi.keytab");
httpKeytabFile = createKeytab(kdc, kdcWorkDir, "HTTP", "spnego.service.keytab");
// create a test user principal
kdc.createPrincipal(TESTUSER, TESTPASS);
StringBuilder jaas = new StringBuilder(1024);
jaas.append("TestUser {\n" +
" com.sun.security.auth.module.Krb5LoginModule required\nuseTicketCache=true;\n" +
"};\n");
jaas.append(createJAASEntry("Client", "dgi", userKeytabFile));
jaas.append(createJAASEntry("Server", "HTTP", httpKeytabFile));
File jaasFile = new File(kdcWorkDir, "jaas.txt");
FileUtils.write(jaasFile, jaas.toString());
bindJVMtoJAASFile(jaasFile);
}
protected String getWarPath() {
return String.format("/../../webapp/target/metadata-webapp-%s",
System.getProperty("project.version", "0.1-incubating-SNAPSHOT"));
}
protected HiveConf getHiveConf() {
HiveConf hiveConf = new HiveConf(this.getClass());
hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, "");
hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, HiveHook.class.getName());
hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, System.getProperty("user.dir") + "/target/metastore");
hiveConf.set(HiveMetaStoreBridge.DGI_URL_PROPERTY, DGI_URL);
hiveConf.set("javax.jdo.option.ConnectionURL", "jdbc:derby:./target/metastore_db;create=true");
hiveConf.set("hive.hook.dgi.synchronous", "true");
return hiveConf;
}
}
@@ -106,7 +106,7 @@ public class HiveHookIT {
}
private void assertTableIsRegistered(String tableName) throws Exception {
assertInstanceIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), "tableName", tableName);
assertInstanceIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), "name", tableName);
}
private void assertDatabaseIsRegistered(String dbName) throws Exception {
@@ -114,10 +114,7 @@ public class HiveHookIT {
}
private void assertInstanceIsRegistered(String typeName, String colName, String colValue) throws Exception{
JSONObject result = dgiCLient.rawSearch(typeName, colName, colValue);
JSONArray results = (JSONArray) result.get("results");
JSONArray results = dgiCLient.rawSearch(typeName, colName, colValue);
Assert.assertEquals(results.length(), 1);
JSONObject resultRow = (JSONObject) results.get(0);
Assert.assertEquals(resultRow.get(typeName + "." + colName), colValue);
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.hive.hook;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.metadata.PropertiesUtil;
import org.apache.hadoop.metadata.security.SecurityProperties;
import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
import org.apache.hadoop.security.ssl.SSLFactory;
import org.apache.hadoop.security.ssl.SSLHostnameVerifier;
import org.mortbay.jetty.webapp.WebAppContext;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.io.File;
import java.io.FileWriter;
import java.net.URL;
import java.nio.file.Files;
import static org.apache.hadoop.metadata.security.SecurityProperties.*;
/**
* Performs all the necessary setup steps for client and server communication over SSL/Kerberos, but does not
* establish a Kerberos user for the invocation. A separate test class is needed since the Jersey layer caches the
* URL connection handler, which indirectly caches the Kerberos delegation token.
*/
public class NegativeSSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
private Driver driver;
private SessionState ss;
private TestSecureEmbeddedServer secureEmbeddedServer;
private String originalConf;
@BeforeClass
public void setUp() throws Exception {
//Set-up hive session
HiveConf conf = getHiveConf();
driver = new Driver(conf);
ss = new SessionState(conf, System.getProperty("user.name"));
ss = SessionState.start(ss);
SessionState.setCurrentSessionState(ss);
jksPath = new Path(Files.createTempDirectory("tempproviders").toString(), "test.jks");
providerUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();
String persistDir = null;
URL resource = NegativeSSLAndKerberosHiveHookIT.class.getResource("/");
if (resource != null) {
persistDir = resource.toURI().getPath();
}
// delete prior ssl-client.xml file
resource = NegativeSSLAndKerberosHiveHookIT.class.getResource("/" + SecurityProperties.SSL_CLIENT_PROPERTIES);
if (resource != null) {
File sslClientFile = new File(persistDir, SecurityProperties.SSL_CLIENT_PROPERTIES);
if (sslClientFile != null && sslClientFile.exists()) {
sslClientFile.delete();
}
}
setupKDCAndPrincipals();
setupCredentials();
// client will actually only leverage subset of these properties
final PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.setProperty(TLS_ENABLED, true);
configuration.setProperty(TRUSTSTORE_FILE_KEY, "../../webapp/target/metadata.keystore");
configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/metadata.keystore");
configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
configuration.setProperty("metadata.http.authentication.type", "kerberos");
configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
configuration.save(new FileWriter(persistDir + File.separator + "client.properties"));
String confLocation = System.getProperty("metadata.conf");
URL url;
if (confLocation == null) {
url = PropertiesUtil.class.getResource("/application.properties");
} else {
url = new File(confLocation, "application.properties").toURI().toURL();
}
configuration.load(url);
configuration.setProperty(TLS_ENABLED, true);
configuration.setProperty("metadata.http.authentication.enabled", "true");
configuration.setProperty("metadata.http.authentication.kerberos.principal", "HTTP/localhost@" + kdc.getRealm());
configuration.setProperty("metadata.http.authentication.kerberos.keytab", httpKeytabFile.getAbsolutePath());
configuration.setProperty("metadata.http.authentication.kerberos.name.rules",
"RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT");
configuration.save(new FileWriter(persistDir + File.separator + "application.properties"));
secureEmbeddedServer = new TestSecureEmbeddedServer(21443, "webapp/target/metadata-governance") {
@Override
public PropertiesConfiguration getConfiguration() {
return configuration;
}
};
WebAppContext webapp = new WebAppContext();
webapp.setContextPath("/");
webapp.setWar(System.getProperty("user.dir") + getWarPath());
secureEmbeddedServer.getServer().setHandler(webapp);
// save original setting
originalConf = System.getProperty("metadata.conf");
System.setProperty("metadata.conf", persistDir);
secureEmbeddedServer.getServer().start();
}
@AfterClass
public void tearDown() throws Exception {
if (secureEmbeddedServer != null) {
secureEmbeddedServer.getServer().stop();
}
if (kdc != null) {
kdc.stop();
}
if (originalConf != null) {
System.setProperty("metadata.conf", originalConf);
}
}
private void runCommand(final String cmd) throws Exception {
ss.setCommandType(null);
driver.run(cmd);
Assert.assertNotNull(driver.getErrorMsg());
Assert.assertTrue(driver.getErrorMsg().contains("Mechanism level: Failed to find any Kerberos tgt"));
}
@Test
public void testUnsecuredCreateDatabase() throws Exception {
String dbName = "db" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create database " + dbName);
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.hive.hook;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.PropertiesUtil;
import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
import org.apache.hadoop.metadata.security.SecurityProperties;
import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
import org.apache.hadoop.security.ssl.SSLFactory;
import org.apache.hadoop.security.ssl.SSLHostnameVerifier;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;
import org.mortbay.jetty.webapp.WebAppContext;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import javax.security.auth.Subject;
import javax.security.auth.callback.*;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.net.URL;
import java.nio.file.Files;
import java.security.PrivilegedExceptionAction;
import static org.apache.hadoop.metadata.security.SecurityProperties.*;
public class SSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
public static final String TEST_USER_JAAS_SECTION = "TestUser";
public static final String TESTUSER = "testuser";
public static final String TESTPASS = "testpass";
private static final String DGI_URL = "https://localhost:21443/";
private Driver driver;
private MetadataServiceClient dgiCLient;
private SessionState ss;
private TestSecureEmbeddedServer secureEmbeddedServer;
private Subject subject;
private String originalConf;
@BeforeClass
public void setUp() throws Exception {
//Set-up hive session
HiveConf conf = getHiveConf();
driver = new Driver(conf);
ss = new SessionState(conf, System.getProperty("user.name"));
ss = SessionState.start(ss);
SessionState.setCurrentSessionState(ss);
jksPath = new Path(Files.createTempDirectory("tempproviders").toString(), "test.jks");
providerUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();
String persistDir = null;
URL resource = SSLAndKerberosHiveHookIT.class.getResource("/");
if (resource != null) {
persistDir = resource.toURI().getPath();
}
// delete prior ssl-client.xml file
resource = SSLAndKerberosHiveHookIT.class.getResource("/" + SecurityProperties.SSL_CLIENT_PROPERTIES);
if (resource != null) {
File sslClientFile = new File(persistDir, SecurityProperties.SSL_CLIENT_PROPERTIES);
if (sslClientFile != null && sslClientFile.exists()) {
sslClientFile.delete();
}
}
setupKDCAndPrincipals();
setupCredentials();
// client will actually only leverage subset of these properties
final PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.setProperty(TLS_ENABLED, true);
configuration.setProperty(TRUSTSTORE_FILE_KEY, "../../webapp/target/metadata.keystore");
configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/metadata.keystore");
configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
configuration.setProperty("metadata.http.authentication.type", "kerberos");
configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
configuration.save(new FileWriter(persistDir + File.separator + "client.properties"));
String confLocation = System.getProperty("metadata.conf");
URL url;
if (confLocation == null) {
url = PropertiesUtil.class.getResource("/application.properties");
} else {
url = new File(confLocation, "application.properties").toURI().toURL();
}
configuration.load(url);
configuration.setProperty(TLS_ENABLED, true);
configuration.setProperty("metadata.http.authentication.enabled", "true");
configuration.setProperty("metadata.http.authentication.kerberos.principal", "HTTP/localhost@" + kdc.getRealm());
configuration.setProperty("metadata.http.authentication.kerberos.keytab", httpKeytabFile.getAbsolutePath());
configuration.setProperty("metadata.http.authentication.kerberos.name.rules",
"RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT");
configuration.save(new FileWriter(persistDir + File.separator + "application.properties"));
dgiCLient = new MetadataServiceClient(DGI_URL) {
@Override
protected PropertiesConfiguration getClientProperties() throws MetadataException {
return configuration;
}
};
secureEmbeddedServer = new TestSecureEmbeddedServer(21443, "webapp/target/metadata-governance") {
@Override
public PropertiesConfiguration getConfiguration() {
return configuration;
}
};
WebAppContext webapp = new WebAppContext();
webapp.setContextPath("/");
webapp.setWar(System.getProperty("user.dir") + getWarPath());
secureEmbeddedServer.getServer().setHandler(webapp);
// save original setting
originalConf = System.getProperty("metadata.conf");
System.setProperty("metadata.conf", persistDir);
secureEmbeddedServer.getServer().start();
subject = loginTestUser();
}
@AfterClass
public void tearDown() throws Exception {
if (secureEmbeddedServer != null) {
secureEmbeddedServer.getServer().stop();
}
if (kdc != null) {
kdc.stop();
}
if (originalConf != null) {
System.setProperty("metadata.conf", originalConf);
}
}
protected Subject loginTestUser() throws LoginException, IOException {
LoginContext lc = new LoginContext(TEST_USER_JAAS_SECTION, new CallbackHandler() {
@Override
public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
for (int i = 0; i < callbacks.length; i++) {
if (callbacks[i] instanceof PasswordCallback) {
PasswordCallback passwordCallback = (PasswordCallback) callbacks[i];
passwordCallback.setPassword(TESTPASS.toCharArray());
}
if (callbacks[i] instanceof NameCallback) {
NameCallback nameCallback = (NameCallback) callbacks[i];
nameCallback.setName(TESTUSER);
}
}
}
});
// attempt authentication
lc.login();
return lc.getSubject();
}
private void runCommand(final String cmd) throws Exception {
ss.setCommandType(null);
Subject.doAs(subject, new PrivilegedExceptionAction<Object>() {
@Override
public Object run() throws Exception {
driver.run(cmd);
return null;
}
});
}
@Test
public void testCreateDatabase() throws Exception {
String dbName = "db" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create database " + dbName);
assertDatabaseIsRegistered(dbName);
}
@Test
public void testCreateTable() throws Exception {
String dbName = "db" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create database " + dbName);
String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create table " + dbName + "." + tableName + "(id int, name string)");
assertTableIsRegistered(tableName);
tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create table " + tableName + "(id int, name string)");
assertTableIsRegistered(tableName);
//Create table where database doesn't exist, will create database instance as well
assertDatabaseIsRegistered("default");
}
@Test
public void testCTAS() throws Exception {
String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create table " + tableName + "(id int, name string)");
String newTableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
String query = "create table " + newTableName + " as select * from " + tableName;
runCommand(query);
assertTableIsRegistered(newTableName);
assertInstanceIsRegistered(HiveDataTypes.HIVE_PROCESS.getName(), "queryText", query);
}
private void assertTableIsRegistered(String tableName) throws Exception {
assertInstanceIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), "name", tableName);
}
private void assertDatabaseIsRegistered(String dbName) throws Exception {
assertInstanceIsRegistered(HiveDataTypes.HIVE_DB.getName(), "name", dbName);
}
private void assertInstanceIsRegistered(final String typeName, final String colName, final String colValue) throws Exception {
Subject.doAs(subject, new PrivilegedExceptionAction<Object>() {
@Override
public Object run() throws Exception {
JSONArray results = dgiCLient.rawSearch(typeName, colName, colValue);
Assert.assertEquals(results.length(), 1);
return null;
}
});
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.hive.hook;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.apache.hadoop.metadata.hive.bridge.HiveMetaStoreBridge;
import org.apache.hadoop.metadata.hive.model.HiveDataTypes;
import org.apache.hadoop.metadata.security.SecurityProperties;
import org.apache.hadoop.metadata.web.service.SecureEmbeddedServer;
import org.apache.hadoop.security.alias.CredentialProvider;
import org.apache.hadoop.security.alias.CredentialProviderFactory;
import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
import org.apache.hadoop.security.ssl.SSLFactory;
import org.apache.hadoop.security.ssl.SSLHostnameVerifier;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;
import org.mortbay.jetty.Server;
import org.mortbay.jetty.webapp.WebAppContext;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.net.URL;
import java.nio.file.Files;
import static org.apache.hadoop.metadata.security.SecurityProperties.*;
public class SSLHiveHookIT {
private static final String DGI_URL = "https://localhost:21443/";
private Driver driver;
private MetadataServiceClient dgiCLient;
private SessionState ss;
private Path jksPath;
private String providerUrl;
private TestSecureEmbeddedServer secureEmbeddedServer;
class TestSecureEmbeddedServer extends SecureEmbeddedServer {
public TestSecureEmbeddedServer(int port, String path) throws IOException {
super(port, path);
}
public Server getServer() { return server; }
@Override
public PropertiesConfiguration getConfiguration() {
return super.getConfiguration();
}
}
@BeforeClass
public void setUp() throws Exception {
//Set-up hive session
HiveConf conf = getHiveConf();
driver = new Driver(conf);
ss = new SessionState(conf, System.getProperty("user.name"));
ss = SessionState.start(ss);
SessionState.setCurrentSessionState(ss);
jksPath = new Path(Files.createTempDirectory("tempproviders").toString(), "test.jks");
providerUrl = JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();
String persistDir = null;
URL resource = SSLHiveHookIT.class.getResource("/");
if (resource != null) {
persistDir = resource.toURI().getPath();
}
// delete prior ssl-client.xml file
resource = SSLHiveHookIT.class.getResource("/" + SecurityProperties.SSL_CLIENT_PROPERTIES);
if (resource != null) {
File sslClientFile = new File(persistDir, SecurityProperties.SSL_CLIENT_PROPERTIES);
if (sslClientFile != null && sslClientFile.exists()) {
sslClientFile.delete();
}
}
setupCredentials();
final PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.setProperty(TLS_ENABLED, true);
configuration.setProperty(TRUSTSTORE_FILE_KEY, "../../webapp/target/metadata.keystore");
configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/metadata.keystore");
configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
configuration.save(new FileWriter(persistDir + File.separator + "client.properties"));
dgiCLient = new MetadataServiceClient(DGI_URL) {
@Override
protected PropertiesConfiguration getClientProperties() throws MetadataException {
return configuration;
}
};
secureEmbeddedServer = new TestSecureEmbeddedServer(21443, "webapp/target/metadata-governance") {
@Override
public PropertiesConfiguration getConfiguration() {
return configuration;
}
};
WebAppContext webapp = new WebAppContext();
webapp.setContextPath("/");
webapp.setWar(System.getProperty("user.dir") + getWarPath());
secureEmbeddedServer.getServer().setHandler(webapp);
secureEmbeddedServer.getServer().start();
}
@AfterClass
public void tearDown() throws Exception {
if (secureEmbeddedServer != null) {
secureEmbeddedServer.getServer().stop();
}
}
protected void setupCredentials() throws Exception {
Configuration conf = new Configuration(false);
File file = new File(jksPath.toUri().getPath());
file.delete();
conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl);
CredentialProvider provider =
CredentialProviderFactory.getProviders(conf).get(0);
// create new aliases
try {
char[] storepass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
KEYSTORE_PASSWORD_KEY, storepass);
char[] trustpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
TRUSTSTORE_PASSWORD_KEY, trustpass);
char[] trustpass2 = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
"ssl.client.truststore.password", trustpass2);
char[] certpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
SERVER_CERT_PASSWORD_KEY, certpass);
// write out so that it can be found in checks
provider.flush();
} catch (Exception e) {
e.printStackTrace();
throw e;
}
}
protected String getWarPath() {
return String.format("/../../webapp/target/metadata-webapp-%s",
System.getProperty("project.version", "0.1-incubating-SNAPSHOT"));
}
private HiveConf getHiveConf() {
HiveConf hiveConf = new HiveConf(this.getClass());
hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, "");
hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, HiveHook.class.getName());
hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, System.getProperty("user.dir") + "/target/metastore");
hiveConf.set(HiveMetaStoreBridge.DGI_URL_PROPERTY, DGI_URL);
hiveConf.set("javax.jdo.option.ConnectionURL", "jdbc:derby:./target/metastore_db;create=true");
hiveConf.set("hive.hook.dgi.synchronous", "true");
return hiveConf;
}
private void runCommand(String cmd) throws Exception {
ss.setCommandType(null);
driver.run(cmd);
}
@Test
public void testCreateDatabase() throws Exception {
String dbName = "db" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create database " + dbName);
assertDatabaseIsRegistered(dbName);
}
@Test
public void testCreateTable() throws Exception {
String dbName = "db" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create database " + dbName);
String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create table " + dbName + "." + tableName + "(id int, name string)");
assertTableIsRegistered(tableName);
tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create table " + tableName + "(id int, name string)");
assertTableIsRegistered(tableName);
//Create table where database doesn't exist, will create database instance as well
assertDatabaseIsRegistered("default");
}
@Test
public void testCTAS() throws Exception {
String tableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
runCommand("create table " + tableName + "(id int, name string)");
String newTableName = "table" + RandomStringUtils.randomAlphanumeric(5).toLowerCase();
String query = "create table " + newTableName + " as select * from " + tableName;
runCommand(query);
assertTableIsRegistered(newTableName);
assertInstanceIsRegistered(HiveDataTypes.HIVE_PROCESS.getName(), "queryText", query);
}
private void assertTableIsRegistered(String tableName) throws Exception {
assertInstanceIsRegistered(HiveDataTypes.HIVE_TABLE.getName(), "name", tableName);
}
private void assertDatabaseIsRegistered(String dbName) throws Exception {
assertInstanceIsRegistered(HiveDataTypes.HIVE_DB.getName(), "name", dbName);
}
private void assertInstanceIsRegistered(String typeName, String colName, String colValue) throws Exception{
JSONArray results = dgiCLient.rawSearch(typeName, colName, colValue);
Assert.assertEquals(results.length(), 1);
}
}
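Illustrative sketch (not part of this change): the aliases created above can be read back through the standard Hadoop credential provider API, which is the pattern the secure server and the CredentialProviderUtilityIT changes later in this commit rely on. The providerUrl parameter is assumed to be the same jceks URL configured above.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.alias.CredentialProvider;
import org.apache.hadoop.security.alias.CredentialProviderFactory;

public class CredentialReadBackSketch {
    /** Reads one password alias back from the configured credential provider. */
    public static char[] readPassword(String providerUrl, String alias) throws Exception {
        Configuration conf = new Configuration(false);
        conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl);
        // The first registered provider is the jceks keystore named in the provider path.
        CredentialProvider provider = CredentialProviderFactory.getProviders(conf).get(0);
        CredentialProvider.CredentialEntry entry = provider.getCredentialEntry(alias);
        return entry == null ? null : entry.getCredential();
    }
}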
......@@ -38,13 +38,71 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-minikdc</artifactId>
<version>${hadoop.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-client</artifactId>
</dependency>
<dependency>
<groupId>commons-configuration</groupId>
<artifactId>commons-configuration</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>2.2</version>
<executions>
<execution>
<goals>
<goal>test-jar</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.felix</groupId>
<artifactId>maven-bundle-plugin</artifactId>
<inherited>true</inherited>
<extensions>true</extensions>
</plugin>
</plugins>
</build>
</project>
......@@ -22,9 +22,18 @@ import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.security.SecureClientUtils;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;
import org.apache.hadoop.metadata.typesystem.json.Serialization;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ws.rs.HttpMethod;
import javax.ws.rs.core.MediaType;
......@@ -33,30 +42,48 @@ import javax.ws.rs.core.UriBuilder;
import java.util.ArrayList;
import java.util.List;
import static org.apache.hadoop.metadata.security.SecurityProperties.TLS_ENABLED;
/**
* Client for metadata.
*/
public class MetadataServiceClient {
private static final Logger LOG = LoggerFactory.getLogger(MetadataServiceClient.class);
public static final String REQUEST_ID = "requestId";
public static final String RESULTS = "results";
public static final String TOTAL_SIZE = "totalSize";
private static final String BASE_URI = "api/metadata/";
private static final String URI_TYPES = "types";
private static final String URI_ENTITIES = "entities";
private static final String URI_TRAITS = "traits";
private static final String URI_SEARCH = "discovery/search";
private final WebResource service;
private WebResource service;
public MetadataServiceClient(String baseUrl) {
DefaultClientConfig config = new DefaultClientConfig();
Client client = Client.create(config);
PropertiesConfiguration clientConfig = null;
try {
clientConfig = getClientProperties();
if (clientConfig.getBoolean(TLS_ENABLED) || clientConfig.getString("metadata.http.authentication.type") != null) {
// create an SSL properties configuration if one doesn't exist. SSLFactory expects a file, so forced to create a
// configuration object, persist it, then subsequently pass in an empty configuration to SSLFactory
SecureClientUtils.persistSSLClientConfiguration(clientConfig);
}
} catch (Exception e) {
LOG.info("Error processing client configuration.", e);
}
URLConnectionClientHandler handler = SecureClientUtils.getClientConnectionHandler(config, clientConfig);
Client client = new Client(handler, config);
client.resource(UriBuilder.fromUri(baseUrl).build());
service = client.resource(UriBuilder.fromUri(baseUrl).build());
}
private static final String BASE_URI = "api/metadata/";
private static final String URI_TYPES = "types";
private static final String URI_ENTITIES = "entities";
private static final String URI_TRAITS = "traits";
private static final String URI_SEARCH = "discovery/search";
protected PropertiesConfiguration getClientProperties() throws MetadataException {
return PropertiesUtil.getClientProperties();
}
static enum API {
//Type operations
......@@ -155,8 +182,14 @@ public class MetadataServiceClient {
* @return result json object
* @throws MetadataServiceException
*/
public JSONObject getEntity(String guid) throws MetadataServiceException {
return callAPI(API.GET_ENTITY, null, guid);
public Referenceable getEntity(String guid) throws MetadataServiceException {
JSONObject jsonResponse = callAPI(API.GET_ENTITY, null, guid);
try {
String entityInstanceDefinition = jsonResponse.getString(MetadataServiceClient.RESULTS);
return InstanceSerialization.fromJsonReferenceable(entityInstanceDefinition, true);
} catch (JSONException e) {
throw new MetadataServiceException(e);
}
}
public JSONObject searchEntity(String searchQuery) throws MetadataServiceException {
......@@ -173,14 +206,14 @@ public class MetadataServiceClient {
* @return result json object
* @throws MetadataServiceException
*/
public JSONObject rawSearch(String typeName, String attributeName,
Object attributeValue) throws MetadataServiceException {
String gremlinQuery = String.format(
"g.V.has(\"typeName\",\"%s\").and(_().has(\"%s.%s\", T.eq, \"%s\")).toList()",
typeName, typeName, attributeName, attributeValue);
return searchByGremlin(gremlinQuery);
// String dslQuery = String.format("%s where %s = \"%s\"", typeName, attributeName, attributeValue);
// return searchByDSL(dslQuery);
public JSONArray rawSearch(String typeName, String attributeName, Object attributeValue) throws
MetadataServiceException {
// String gremlinQuery = String.format(
// "g.V.has(\"typeName\",\"%s\").and(_().has(\"%s.%s\", T.eq, \"%s\")).toList()",
// typeName, typeName, attributeName, attributeValue);
// return searchByGremlin(gremlinQuery);
String dslQuery = String.format("%s where %s = \"%s\"", typeName, attributeName, attributeValue);
return searchByDSL(dslQuery);
}
/**
......@@ -189,10 +222,15 @@ public class MetadataServiceClient {
* @return result json object
* @throws MetadataServiceException
*/
public JSONObject searchByDSL(String query) throws MetadataServiceException {
public JSONArray searchByDSL(String query) throws MetadataServiceException {
WebResource resource = getResource(API.SEARCH_DSL);
resource = resource.queryParam("query", query);
return callAPIWithResource(API.SEARCH_DSL, resource);
JSONObject result = callAPIWithResource(API.SEARCH_DSL, resource);
try {
return result.getJSONObject("results").getJSONArray("rows");
} catch (JSONException e) {
throw new MetadataServiceException(e);
}
}
/**
......
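Illustrative sketch (not part of this change): how a caller might use the reworked search methods, which now return the "rows" JSONArray instead of the whole response envelope. The base URL and the hive_table attribute values are assumptions borrowed from the Hive bridge code elsewhere in this commit.
import org.apache.hadoop.metadata.MetadataServiceClient;
import org.codehaus.jettison.json.JSONArray;

public class ClientSearchSketch {
    public static void main(String[] args) throws Exception {
        // Assumed DGI server location; adjust to the actual deployment.
        MetadataServiceClient client = new MetadataServiceClient("http://localhost:21000/");

        // rawSearch now goes through the DSL path and returns just the result rows.
        JSONArray rows = client.rawSearch("hive_table", "name", "sales_fact");
        for (int i = 0; i < rows.length(); i++) {
            System.out.println(rows.getJSONObject(i).toString());
        }
    }
}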
......@@ -31,20 +31,30 @@ public class PropertiesUtil {
private static final Logger LOG = LoggerFactory.getLogger(PropertiesUtil.class);
private static final String APPLICATION_PROPERTIES = "application.properties";
public static final String CLIENT_PROPERTIES = "client.properties";
public static final PropertiesConfiguration getApplicationProperties() throws MetadataException {
return getPropertiesConfiguration(APPLICATION_PROPERTIES);
}
public static final PropertiesConfiguration getClientProperties() throws MetadataException {
return getPropertiesConfiguration(CLIENT_PROPERTIES);
}
private static PropertiesConfiguration getPropertiesConfiguration(String name) throws MetadataException {
String confLocation = System.getProperty("metadata.conf");
URL url;
try {
if (confLocation == null) {
url = PropertiesUtil.class.getResource("/" + APPLICATION_PROPERTIES);
url = PropertiesUtil.class.getResource("/" + name);
} else {
url = new File(confLocation, APPLICATION_PROPERTIES).toURI().toURL();
url = new File(confLocation, name).toURI().toURL();
}
LOG.info("Loading {} from {}", APPLICATION_PROPERTIES, url);
LOG.info("Loading {} from {}", name, url);
return new PropertiesConfiguration(url);
} catch (Exception e) {
throw new MetadataException("Failed to load application properties", e);
}
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.security;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.client.urlconnection.HttpURLConnectionFactory;
import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.PropertiesUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.alias.CredentialProviderFactory;
import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.security.authentication.client.Authenticator;
import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
import org.apache.hadoop.security.ssl.SSLFactory;
import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;
import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticator;
import org.apache.hadoop.security.token.delegation.web.KerberosDelegationTokenAuthenticator;
import org.apache.hadoop.security.token.delegation.web.PseudoDelegationTokenAuthenticator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLSocketFactory;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.lang.reflect.UndeclaredThrowableException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.security.GeneralSecurityException;
import java.security.PrivilegedExceptionAction;
import static org.apache.hadoop.metadata.security.SecurityProperties.*;
/**
 * Utilities for building secure (TLS and Kerberos delegation-token aware) client connections
 * and for persisting the ssl-client configuration that SSLFactory reads.
 */
public class SecureClientUtils {
public final static int DEFAULT_SOCKET_TIMEOUT = 1 * 60 * 1000; // 1 minute
private static final Logger LOG = LoggerFactory.getLogger(SecureClientUtils.class);
public static URLConnectionClientHandler getClientConnectionHandler(DefaultClientConfig config,
PropertiesConfiguration clientConfig) {
config.getProperties().put(
URLConnectionClientHandler.PROPERTY_HTTP_URL_CONNECTION_SET_METHOD_WORKAROUND,
true);
Configuration conf = new Configuration(false);
conf.addResource(conf.get(SSLFactory.SSL_CLIENT_CONF_KEY, "ssl-client.xml"));
String authType = "simple";
if (clientConfig != null) {
authType = clientConfig.getString("metadata.http.authentication.type", "simple");
}
UserGroupInformation.setConfiguration(conf);
final ConnectionConfigurator connConfigurator = newConnConfigurator(conf);
Authenticator authenticator = new PseudoDelegationTokenAuthenticator();
if (!authType.equals("simple")) {
authenticator = new KerberosDelegationTokenAuthenticator();
}
authenticator.setConnectionConfigurator(connConfigurator);
final DelegationTokenAuthenticator finalAuthenticator = (DelegationTokenAuthenticator) authenticator;
final DelegationTokenAuthenticatedURL.Token token = new DelegationTokenAuthenticatedURL.Token();
HttpURLConnectionFactory httpURLConnectionFactory = new HttpURLConnectionFactory() {
@Override
public HttpURLConnection getHttpURLConnection(final URL url) throws IOException {
try {
return new DelegationTokenAuthenticatedURL(finalAuthenticator, connConfigurator)
.openConnection(url, token, null);
} catch (Exception e) {
throw new IOException(e);
}
}
};
return new URLConnectionClientHandler(httpURLConnectionFactory);
}
private final static ConnectionConfigurator DEFAULT_TIMEOUT_CONN_CONFIGURATOR =
new ConnectionConfigurator() {
@Override
public HttpURLConnection configure(HttpURLConnection conn)
throws IOException {
setTimeouts(conn, DEFAULT_SOCKET_TIMEOUT);
return conn;
}
};
private static ConnectionConfigurator newConnConfigurator(Configuration conf) {
try {
return newSslConnConfigurator(DEFAULT_SOCKET_TIMEOUT, conf);
} catch (Exception e) {
LOG.debug("Cannot load customized ssl related configuration. " +
"Fallback to system-generic settings.", e);
return DEFAULT_TIMEOUT_CONN_CONFIGURATOR;
}
}
private static ConnectionConfigurator newSslConnConfigurator(final int timeout,
Configuration conf) throws IOException, GeneralSecurityException {
final SSLFactory factory;
final SSLSocketFactory sf;
final HostnameVerifier hv;
factory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
factory.init();
sf = factory.createSSLSocketFactory();
hv = factory.getHostnameVerifier();
return new ConnectionConfigurator() {
@Override
public HttpURLConnection configure(HttpURLConnection conn)
throws IOException {
if (conn instanceof HttpsURLConnection) {
HttpsURLConnection c = (HttpsURLConnection) conn;
c.setSSLSocketFactory(sf);
c.setHostnameVerifier(hv);
}
setTimeouts(conn, timeout);
return conn;
}
};
}
private static void setTimeouts(URLConnection connection, int socketTimeout) {
connection.setConnectTimeout(socketTimeout);
connection.setReadTimeout(socketTimeout);
}
private static File getSSLClientFile() throws MetadataException {
String confLocation = System.getProperty("metadata.conf");
File sslDir;
try {
if (confLocation == null) {
String persistDir = null;
URL resource = PropertiesUtil.class.getResource("/");
if (resource != null) {
persistDir = resource.toURI().getPath();
}
assert persistDir != null;
sslDir = new File(persistDir);
} else {
sslDir = new File(confLocation);
}
LOG.info("ssl-client.xml will be created in {}", sslDir);
} catch (Exception e) {
throw new MetadataException("Failed to find client configuration directory", e);
}
return new File(sslDir, SecurityProperties.SSL_CLIENT_PROPERTIES);
}
public static void persistSSLClientConfiguration(PropertiesConfiguration clientConfig) throws MetadataException, IOException {
//trust settings
Configuration configuration = new Configuration(false);
File sslClientFile = getSSLClientFile();
if (!sslClientFile.exists()) {
configuration.set("ssl.client.truststore.type", "jks");
configuration.set("ssl.client.truststore.location", clientConfig.getString(TRUSTSTORE_FILE_KEY));
if (clientConfig.getBoolean(CLIENT_AUTH_KEY, false)) {
// need to get client key properties
configuration.set("ssl.client.keystore.location", clientConfig.getString(KEYSTORE_FILE_KEY));
configuration.set("ssl.client.keystore.type", "jks");
}
// add the configured credential provider
configuration.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
clientConfig.getString(CERT_STORES_CREDENTIAL_PROVIDER_PATH));
String hostnameVerifier = clientConfig.getString(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY);
if (hostnameVerifier != null) {
configuration.set(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, hostnameVerifier);
}
configuration.writeXml(new FileWriter(sslClientFile));
}
}
}
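Illustrative sketch (not part of this change): wiring SecureClientUtils into a Jersey client, mirroring what the new MetadataServiceClient constructor above does. The metadata.enableTLS key comes from SecurityProperties; everything else is read from client.properties.
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.PropertiesUtil;
import org.apache.hadoop.metadata.security.SecureClientUtils;

public class SecureClientSketch {
    public static Client buildClient() throws Exception {
        DefaultClientConfig config = new DefaultClientConfig();
        // client.properties carries metadata.enableTLS plus the trust/key store settings.
        PropertiesConfiguration clientConfig = PropertiesUtil.getClientProperties();
        if (clientConfig.getBoolean("metadata.enableTLS", false)) {
            // Writes ssl-client.xml so SSLFactory can find the store locations and credential provider.
            SecureClientUtils.persistSSLClientConfiguration(clientConfig);
        }
        URLConnectionClientHandler handler =
                SecureClientUtils.getClientConnectionHandler(config, clientConfig);
        return new Client(handler, config);
    }
}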
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.security;
/**
 * Property keys shared between the secure server and client configuration.
 */
public interface SecurityProperties {
public static final String TLS_ENABLED = "metadata.enableTLS";
public static final String KEYSTORE_FILE_KEY = "keystore.file";
public static final String DEFAULT_KEYSTORE_FILE_LOCATION = "target/metadata.keystore";
public static final String KEYSTORE_PASSWORD_KEY = "keystore.password";
public static final String TRUSTSTORE_FILE_KEY = "truststore.file";
public static final String DEFATULT_TRUSTORE_FILE_LOCATION = "target/metadata.keystore";
public static final String TRUSTSTORE_PASSWORD_KEY = "truststore.password";
public static final String SERVER_CERT_PASSWORD_KEY = "password";
public static final String CLIENT_AUTH_KEY = "client.auth.enabled";
public static final String CERT_STORES_CREDENTIAL_PROVIDER_PATH = "cert.stores.credential.provider.path";
String SSL_CLIENT_PROPERTIES = "ssl-client.xml";
}
......@@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.web;
package org.apache.hadoop.metadata.security;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
......
......@@ -823,7 +823,9 @@
<!--<skipTests>true</skipTests>-->
<forkMode>always</forkMode>
<redirectTestOutputToFile>true</redirectTestOutputToFile>
<argLine>-Djava.awt.headless=true</argLine>
<argLine>-Djava.awt.headless=true -Dproject.version=${project.version}
-Dhadoop.tmp.dir=${project.build.directory}/tmp-hadoop-${user.name}
-Xmx1024m -XX:MaxPermSize=512m</argLine>
</configuration>
<dependencies>
<dependency>
......@@ -943,6 +945,7 @@
<exclude>**/maven-eclipse.xml</exclude>
<exclude>**/.externalToolBuilders/**</exclude>
<exclude>dashboard/**</exclude>
<exclude>**/build.log</exclude>
</excludes>
</configuration>
<executions>
......
......@@ -40,6 +40,11 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-client</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</dependency>
......
......@@ -100,17 +100,18 @@ public class HiveLineageService implements LineageService {
public String getOutputs(String tableName) throws DiscoveryException {
LOG.info("Fetching lineage outputs for tableName={}", tableName);
HiveWhereUsedQuery outputsQuery = new HiveWhereUsedQuery(
HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME,
Option.empty(), SELECT_ATTRIBUTES, true,
graphPersistenceStrategy, titanGraph);
Expressions.Expression expression = outputsQuery.expr();
LOG.debug("Expression is [" + expression.toString() +"]");
try {
HiveWhereUsedQuery outputsQuery = new HiveWhereUsedQuery(
HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME,
Option.empty(), SELECT_ATTRIBUTES, true,
graphPersistenceStrategy, titanGraph);
Expressions.Expression expression = outputsQuery.expr();
return discoveryService.evaluate(expression).toJson();
} catch (Exception e) { // unable to catch ExpressionException
throw new DiscoveryException("Invalid expression", e);
throw new DiscoveryException("Invalid expression [" + expression.toString() + "]", e);
}
}
......@@ -124,17 +125,18 @@ public class HiveLineageService implements LineageService {
public String getInputs(String tableName) throws DiscoveryException {
LOG.info("Fetching lineage inputs for tableName={}", tableName);
try {
HiveLineageQuery inputsQuery = new HiveLineageQuery(
HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME,
Option.empty(), SELECT_ATTRIBUTES, true,
graphPersistenceStrategy, titanGraph);
HiveLineageQuery inputsQuery = new HiveLineageQuery(
HIVE_TABLE_TYPE_NAME, tableName, HIVE_PROCESS_TYPE_NAME,
HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME,
Option.empty(), SELECT_ATTRIBUTES, true,
graphPersistenceStrategy, titanGraph);
Expressions.Expression expression = inputsQuery.expr();
Expressions.Expression expression = inputsQuery.expr();
LOG.debug("Expression is [" + expression.toString() +"]");
try {
return discoveryService.evaluate(expression).toJson();
} catch (Exception e) { // unable to catch ExpressionException
throw new DiscoveryException("Invalid expression", e);
throw new DiscoveryException("Invalid expression [" + expression.toString() + "]", e);
}
}
......@@ -148,9 +150,10 @@ public class HiveLineageService implements LineageService {
public String getSchema(String tableName) throws DiscoveryException {
// todo - validate if indeed this is a table type and exists
String schemaQuery = HIVE_TABLE_TYPE_NAME
+ " where name=\"" + tableName + "\", "
+ HIVE_TABLE_COLUMNS_ATTRIBUTE_NAME;
// + " as column select column.name, column.dataType, column.comment";
+ " where name=\"" + tableName + "\""
+ ", " + HIVE_TABLE_COLUMNS_ATTRIBUTE_NAME
// + " as column select column.name, column.dataType, column.comment"
;
return discoveryService.searchByDSL(schemaQuery);
}
}
......@@ -73,7 +73,7 @@ public class GraphBackedDiscoveryService implements DiscoveryService {
this.graphPersistenceStrategy = new DefaultGraphPersistenceStrategy(metadataRepository);
}
//Refer http://s3.thinkaurelius.com/docs/titan/0.5.0/index-backends.html for indexed query
//Refer http://s3.thinkaurelius.com/docs/titan/0.5.4/index-backends.html for indexed query
//http://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query
// .html#query-string-syntax for query syntax
@Override
......
......@@ -128,7 +128,16 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
@Override
public String getEdgeLabel(IDataType<?> dataType, AttributeInfo aInfo) {
return EDGE_LABEL_PREFIX + dataType.getName() + "." + aInfo.name;
return getEdgeLabel(dataType.getName(), aInfo.name);
}
public String getEdgeLabel(String typeName, String attrName) {
return EDGE_LABEL_PREFIX + typeName + "." + attrName;
}
public String getEdgeLabel(ITypedInstance typedInstance, AttributeInfo aInfo) throws MetadataException {
IDataType dataType = typeSystem.getDataType(IDataType.class, typedInstance.getTypeName());
return getEdgeLabel(dataType, aInfo);
}
@Override
......@@ -275,7 +284,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
}
final String entityTypeName = getTypeName(instanceVertex);
String relationshipLabel = entityTypeName + "." + traitNameToBeDeleted;
String relationshipLabel = getEdgeLabel(entityTypeName, traitNameToBeDeleted);
Iterator<Edge> results = instanceVertex.getEdges(
Direction.OUT, relationshipLabel).iterator();
if (results.hasNext()) { // there should only be one edge for this label
......@@ -673,6 +682,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
Object attrValue = typedInstance.get(attributeInfo.name);
LOG.debug("mapping attribute {} = {}", attributeInfo.name, attrValue);
final String propertyName = getQualifiedName(typedInstance, attributeInfo);
String edgeLabel = getEdgeLabel(typedInstance, attributeInfo);
if (attrValue == null) {
return;
}
......@@ -698,11 +708,10 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
case STRUCT:
Vertex structInstanceVertex = mapStructInstanceToVertex(id,
(ITypedStruct) typedInstance.get(attributeInfo.name),
attributeInfo, idToVertexMap);
(ITypedStruct) typedInstance.get(attributeInfo.name), attributeInfo, idToVertexMap);
// add an edge to the newly created vertex from the parent
GraphHelper.addEdge(
titanGraph, instanceVertex, structInstanceVertex, propertyName);
titanGraph, instanceVertex, structInstanceVertex, edgeLabel);
break;
case TRAIT:
......@@ -712,7 +721,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
case CLASS:
Id referenceId = (Id) typedInstance.get(attributeInfo.name);
mapClassReferenceAsEdge(
instanceVertex, idToVertexMap, propertyName, referenceId);
instanceVertex, idToVertexMap, edgeLabel, referenceId);
break;
default:
......@@ -886,7 +895,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
traitInstance.fieldMapping().fields, idToVertexMap);
// add an edge to the newly created vertex from the parent
String relationshipLabel = typedInstanceTypeName + "." + traitName;
String relationshipLabel = getEdgeLabel(typedInstanceTypeName, traitName);
GraphHelper.addEdge(
titanGraph, parentInstanceVertex, traitInstanceVertex, relationshipLabel);
}
......@@ -1017,7 +1026,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
break;
case CLASS:
String relationshipLabel = getQualifiedName(typedInstance, attributeInfo);
String relationshipLabel = getEdgeLabel(typedInstance, attributeInfo);
Object idOrInstance = mapClassReferenceToVertex(instanceVertex,
attributeInfo, relationshipLabel, attributeInfo.dataType());
typedInstance.set(attributeInfo.name, idOrInstance);
......@@ -1221,7 +1230,7 @@ public class GraphBackedMetadataRepository implements MetadataRepository {
ITypedStruct structInstance = structType.createInstance();
typedInstance.set(attributeInfo.name, structInstance);
String relationshipLabel = getQualifiedName(typedInstance, attributeInfo);
String relationshipLabel = getEdgeLabel(typedInstance, attributeInfo);
LOG.debug("Finding edge for {} -> label {} ", instanceVertex, relationshipLabel);
for (Edge edge : instanceVertex.getEdges(Direction.OUT, relationshipLabel)) {
final Vertex structInstanceVertex = edge.getVertex(Direction.IN);
......
......@@ -219,7 +219,8 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
case CLASS:
// this is only A reference, index the attribute for edge
createEdgeMixedIndex(propertyName);
// Commenting this out since we do not need an index for edge here
//createEdgeMixedIndex(propertyName);
break;
default:
......@@ -314,15 +315,23 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
.dataType(propertyClass)
.make();
TitanGraphIndex vertexIndex = management.getGraphIndex(Constants.VERTEX_INDEX);
management.addIndexKey(vertexIndex, propertyKey);
management.commit();
if (propertyClass == Boolean.class) {
//Use standard index as backing index only supports string, int and geo types
management.buildIndex(propertyName, Vertex.class).addKey(propertyKey).buildCompositeIndex();
management.commit();
} else {
//Use backing index
TitanGraphIndex vertexIndex = management.getGraphIndex(Constants.VERTEX_INDEX);
management.addIndexKey(vertexIndex, propertyKey);
management.commit();
}
LOG.info("Created mixed vertex index for property {}", propertyName);
}
return propertyKey;
}
/* Commenting this out since we do not need an index for edge label here
private void createEdgeMixedIndex(String propertyName) {
TitanManagement management = titanGraph.getManagementSystem();
EdgeLabel edgeLabel = management.getEdgeLabel(propertyName);
......@@ -332,5 +341,5 @@ public class GraphBackedSearchIndexer implements SearchIndexer {
management.commit();
LOG.info("Created index for edge label {}", propertyName);
}
}
} */
}
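Illustrative sketch (not part of this change), condensing the branch above: Boolean keys get their own composite index because the Elasticsearch-backed mixed index only handles string, numeric and geo types, while every other property is added to the shared vertex backing index. The literal "vertex_index" stands in for Constants.VERTEX_INDEX.
import com.thinkaurelius.titan.core.PropertyKey;
import com.thinkaurelius.titan.core.TitanGraph;
import com.thinkaurelius.titan.core.schema.TitanManagement;
import com.tinkerpop.blueprints.Vertex;

public class VertexIndexSketch {
    static PropertyKey indexProperty(TitanGraph graph, String name, Class<?> clazz) {
        TitanManagement management = graph.getManagementSystem();
        PropertyKey key = management.makePropertyKey(name).dataType(clazz).make();
        if (clazz == Boolean.class) {
            // Composite (standard) index: exact-match lookups, no external index backend needed.
            management.buildIndex(name, Vertex.class).addKey(key).buildCompositeIndex();
        } else {
            // Mixed index backed by Elasticsearch, shared by all indexed vertex properties.
            management.addIndexKey(management.getGraphIndex("vertex_index"), key);
        }
        management.commit();
        return key;
    }
}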
......@@ -191,7 +191,7 @@ public class DefaultMetadataService implements MetadataService {
Preconditions.checkNotNull(guid, "guid cannot be null");
final ITypedReferenceableInstance instance = repository.getEntityDefinition(guid);
return Serialization$.MODULE$.toJson(instance);
return InstanceSerialization.toJson(instance, true);
}
/**
......
......@@ -104,7 +104,6 @@ class GremlinEvaluator(qry: GremlinQuery, persistenceStrategy: GraphPersistenceS
val v = rV.getColumn(src).get(idx)
sInstance.set(cName, persistenceStrategy.constructInstance(aE.dataType, v))
}
sInstance
addPathStruct(r, sInstance)
}
GremlinQueryResult(qry.expr.toString, rType, rows.toList)
......@@ -137,4 +136,4 @@ object JsonHelper {
def toJson(r: GremlinQueryResult): String = {
writePretty(r)
}
}
\ No newline at end of file
}
......@@ -29,6 +29,7 @@ import org.apache.hadoop.metadata.discovery.graph.GraphBackedDiscoveryService;
import org.apache.hadoop.metadata.query.HiveTitanSample;
import org.apache.hadoop.metadata.query.QueryTestsUtils;
import org.apache.hadoop.metadata.repository.graph.GraphBackedMetadataRepository;
import org.apache.hadoop.metadata.repository.graph.GraphBackedSearchIndexer;
import org.apache.hadoop.metadata.repository.graph.GraphHelper;
import org.apache.hadoop.metadata.repository.graph.GraphProvider;
import org.apache.hadoop.metadata.typesystem.ITypedReferenceableInstance;
......@@ -224,6 +225,7 @@ public class GraphBackedDiscoveryServiceTest {
{"Table as _loop0 loop (LoadProcess outputTable) withPath"},
{"Table as src loop (LoadProcess outputTable) as dest select src.name as srcTable, dest.name as destTable withPath"},
{"Table as t, sd, Column as c where t.name=\"sales_fact\" select c.name as colName, c.dataType as colType"},
{"Table where name='sales_fact', db where name='Reporting'"}
};
}
......@@ -268,39 +270,6 @@ public class GraphBackedDiscoveryServiceTest {
}
@Test
public void testSearchByDSLQuery() throws Exception {
String dslQuery = "Column as PII";
System.out.println("Executing dslQuery = " + dslQuery);
String jsonResults = discoveryService.searchByDSL(dslQuery);
Assert.assertNotNull(jsonResults);
JSONObject results = new JSONObject(jsonResults);
Assert.assertEquals(results.length(), 3);
System.out.println("results = " + results);
Object query = results.get("query");
Assert.assertNotNull(query);
JSONObject dataType = results.getJSONObject("dataType");
Assert.assertNotNull(dataType);
String typeName = dataType.getString("typeName");
Assert.assertNotNull(typeName);
JSONArray rows = results.getJSONArray("rows");
Assert.assertNotNull(rows);
Assert.assertTrue(rows.length() > 0);
for (int index = 0; index < rows.length(); index++) {
JSONObject row = rows.getJSONObject(index);
String type = row.getString("$typeName$");
Assert.assertEquals(type, "Column");
String name = row.getString("name");
Assert.assertNotEquals(name, "null");
}
}
@Test
public void testSearchForTypeInheritance() throws Exception {
createTypesWithMultiLevelInheritance();
createInstances();
......
......@@ -145,9 +145,8 @@ public class GraphBackedMetadataRepositoryTest {
@Test (dependsOnMethods = "testSubmitEntity")
public void testGetTraitLabel() throws Exception {
Assert.assertEquals(repositoryService.getTraitLabel(
typeSystem.getDataType(ClassType.class, TABLE_TYPE),
CLASSIFICATION), TABLE_TYPE + "." + CLASSIFICATION);
Assert.assertEquals(repositoryService.getTraitLabel(typeSystem.getDataType(ClassType.class, TABLE_TYPE),
CLASSIFICATION), TABLE_TYPE + "." + CLASSIFICATION);
}
@Test
......@@ -317,6 +316,39 @@ public class GraphBackedMetadataRepositoryTest {
Assert.assertEquals(repositoryService.getTypeName(tableVertex), TABLE_TYPE);
}
@Test(dependsOnMethods = "testCreateEntity")
public void testSearchByDSLQuery() throws Exception {
String dslQuery = "hive_database as PII";
System.out.println("Executing dslQuery = " + dslQuery);
String jsonResults = discoveryService.searchByDSL(dslQuery);
Assert.assertNotNull(jsonResults);
JSONObject results = new JSONObject(jsonResults);
Assert.assertEquals(results.length(), 3);
System.out.println("results = " + results);
Object query = results.get("query");
Assert.assertNotNull(query);
JSONObject dataType = results.getJSONObject("dataType");
Assert.assertNotNull(dataType);
String typeName = dataType.getString("typeName");
Assert.assertNotNull(typeName);
JSONArray rows = results.getJSONArray("rows");
Assert.assertNotNull(rows);
Assert.assertTrue(rows.length() > 0);
for (int index = 0; index < rows.length(); index++) {
JSONObject row = rows.getJSONObject(index);
String type = row.getString("$typeName$");
Assert.assertEquals(type, "hive_database");
String name = row.getString("name");
Assert.assertEquals(name, DATABASE_NAME);
}
}
/**
* Full text search requires GraphBackedSearchIndexer, and GraphBackedSearchIndexer can't be enabled in
* GraphBackedDiscoveryServiceTest because of its test data. So, test for full text search is in
......
......@@ -99,7 +99,7 @@ mkdir -p $METADATA_LOG_DIR
pushd ${BASEDIR} > /dev/null
JAVA_PROPERTIES="$METADATA_OPTS $METADATA_PROPERTIES -Dmetadata.log.dir=$METADATA_LOG_DIR -Dmetadata.home=${METADATA_HOME_DIR} -Dmetadata.conf=${METADATA_CONF}"
JAVA_PROPERTIES="$METADATA_OPTS $METADATA_PROPERTIES -Dmetadata.log.dir=$METADATA_LOG_DIR -Dmetadata.home=${METADATA_HOME_DIR} -Dmetadata.conf=${METADATA_CONF} -Dmetadata.log.file=application.log"
shift
while [[ ${1} =~ ^\-D ]]; do
......
......@@ -20,8 +20,9 @@ import sys
import metadata_config as mc
METADATA_LOG_OPTS="-Dmetadata.log.dir=%s"
METADATA_LOG_OPTS="-Dmetadata.log.dir=%s -Dmetadata.log.file=application.log"
METADATA_COMMAND_OPTS="-Dmetadata.home=%s"
METADATA_CONFIG_OPTS="-Dmetadata.conf=%s"
DEFAULT_JVM_OPTS="-Xmx1024m"
def main():
......@@ -37,6 +38,9 @@ def main():
cmd_opts = (METADATA_COMMAND_OPTS % metadata_home)
jvm_opts_list.extend(cmd_opts.split())
config_opts = (METADATA_CONFIG_OPTS % confdir)
jvm_opts_list.extend(config_opts.split())
default_jvm_opts = DEFAULT_JVM_OPTS
metadata_jvm_opts = os.environ.get(mc.METADATA_OPTS, default_jvm_opts)
jvm_opts_list.extend(metadata_jvm_opts.split())
......
......@@ -30,12 +30,13 @@ metadata.graph.index.search.elasticsearch.create.sleep=2000
######### Hive Lineage Configs #########
# This models follows the quick-start guide
metadata.lineage.hive.table.type.name=Table
metadata.lineage.hive.column.type.name=Column
metadata.lineage.hive.table.type.name=hive_table
metadata.lineage.hive.table.column.name=columns
metadata.lineage.hive.process.type.name=LoadProcess
metadata.lineage.hive.process.type.name=hive_process
metadata.lineage.hive.process.inputs.name=inputTables
metadata.lineage.hive.process.outputs.name=outputTables
#Currently unused
#metadata.lineage.hive.column.type.name=Column
######### Security Properties #########
......
......@@ -28,7 +28,7 @@
</appender>
<appender name="FILE" class="org.apache.log4j.DailyRollingFileAppender">
<param name="File" value="${metadata.log.dir}/application.log"/>
<param name="File" value="${metadata.log.dir}/${metadata.log.file}"/>
<param name="Append" value="true"/>
<param name="Threshold" value="debug"/>
<layout class="org.apache.log4j.PatternLayout">
......
......@@ -50,13 +50,13 @@ class TestMetadata(unittest.TestCase):
'org.apache.hadoop.metadata.Main',
['-app', 'metadata_home/server/webapp/metadata'],
'metadata_home/conf:metadata_home/server/webapp/metadata/WEB-INF/classes:metadata_home/server/webapp/metadata/WEB-INF/lib\\*:metadata_home/libext\\*',
['-Dmetadata.log.dir=metadata_home/logs', '-Dmetadata.home=metadata_home', '-Xmx1024m'])
['-Dmetadata.log.dir=metadata_home/logs', '-Dmetadata.log.file=application.log', '-Dmetadata.home=metadata_home', '-Dmetadata.conf=metadata_home/conf', '-Xmx1024m'])
else:
java_mock.assert_called_with(
'org.apache.hadoop.metadata.Main',
['-app', 'metadata_home/server/webapp/metadata'],
'metadata_home/conf:metadata_home/server/webapp/metadata/WEB-INF/classes:metadata_home/server/webapp/metadata/WEB-INF/lib/*:metadata_home/libext/*',
['-Dmetadata.log.dir=metadata_home/logs', '-Dmetadata.home=metadata_home', '-Xmx1024m'])
['-Dmetadata.log.dir=metadata_home/logs', '-Dmetadata.log.file=application.log', '-Dmetadata.home=metadata_home', '-Dmetadata.conf=metadata_home/conf', '-Xmx1024m'])
pass
......
......@@ -20,6 +20,8 @@ package org.apache.hadoop.metadata.typesystem;
import org.apache.hadoop.metadata.MetadataException;
import java.util.Map;
/**
* Represents a Struct or Trait or Object.
*/
......@@ -31,4 +33,6 @@ public interface IInstance {
void set(String attrName, Object val) throws MetadataException;
Map<String, Object> getValuesMap() throws MetadataException;
}
......@@ -56,7 +56,7 @@ public class Struct implements IStruct {
values.put(attrName, value);
}
@InterfaceAudience.Private
@Override
public Map<String, Object> getValuesMap() {
return values;
}
......
......@@ -22,6 +22,9 @@ import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.typesystem.IStruct;
import org.apache.hadoop.metadata.typesystem.types.DownCastFieldMapping;
import java.util.HashMap;
import java.util.Map;
public class DownCastStructInstance implements IStruct {
public final String typeName;
......@@ -49,6 +52,20 @@ public class DownCastStructInstance implements IStruct {
public void set(String attrName, Object val) throws MetadataException {
fieldMapping.set(this, attrName, val);
}
/*
* Use only for json serialization
* @nonpublic
*/
@Override
public Map<String, Object> getValuesMap() throws MetadataException {
Map<String,Object> m = new HashMap<>();
for (String attr : fieldMapping.fieldNameMap.keySet()) {
m.put(attr, get(attr));
}
return m;
}
}
......@@ -27,6 +27,7 @@ import org.apache.hadoop.metadata.typesystem.types.FieldMapping;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Date;
import java.util.Map;
import java.util.UUID;
public class Id implements ITypedReferenceableInstance {
......@@ -142,6 +143,11 @@ public class Id implements ITypedReferenceableInstance {
return null;
}
@Override
public Map<String, Object> getValuesMap() throws MetadataException {
throw new MetadataException("Get/Set not supported on an Id object");
}
public void setNull(String attrName) throws MetadataException {
set(attrName, null);
}
......
......@@ -36,6 +36,7 @@ import org.apache.hadoop.metadata.typesystem.types.ValueConversionException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
public class StructInstance implements ITypedStruct {
......@@ -233,6 +234,20 @@ public class StructInstance implements ITypedStruct {
nullFlags[nullPos] = true;
}
/*
* Use only for json serialization
* @nonpublic
*/
@Override
public Map<String, Object> getValuesMap() throws MetadataException {
Map<String,Object> m = new HashMap<>();
for (String attr : fieldMapping.fields.keySet()) {
m.put(attr, get(attr));
}
return m;
}
public boolean getBoolean(String attrName) throws MetadataException {
AttributeInfo i = fieldMapping.fields.get(attrName);
if (i == null) {
......
......@@ -266,17 +266,17 @@ object InstanceSerialization {
def asScala(v : Any) : Any = v match {
case i : Id => _Id(i._getId(), i.getVersion, i.getClassName)
case r : Referenceable => {
case r : IReferenceableInstance => {
val traits = r.getTraits.map { tName =>
val t = r.getTrait(tName).asInstanceOf[Struct]
val t = r.getTrait(tName).asInstanceOf[IStruct]
(tName -> _Struct(t.getTypeName, asScala(t.getValuesMap).asInstanceOf[Map[String, AnyRef]]))
}.toMap
_Reference(asScala(r.getId).asInstanceOf[_Id],
r.typeName, asScala(r.getValuesMap).asInstanceOf[Map[String, AnyRef]],
r.getTypeName, asScala(r.getValuesMap).asInstanceOf[Map[String, AnyRef]],
asScala(r.getTraits).asInstanceOf[List[String]],
traits.asInstanceOf[Map[String, _Struct]])
}
case s : Struct => _Struct(s.typeName, asScala(s.getValuesMap).asInstanceOf[Map[String, AnyRef]])
case s : IStruct => _Struct(s.getTypeName, asScala(s.getValuesMap).asInstanceOf[Map[String, AnyRef]])
case l : java.util.List[_] => l.asScala.map(e => asScala(e)).toList
case m : java.util.Map[_, _] => m.asScala.mapValues(v => asScala(v)).toMap
case _ => v
......@@ -301,7 +301,7 @@ object InstanceSerialization {
writePretty(_s)
}
def toJson(value: Struct, withBigDecimals : Boolean = false): String = {
def toJson(value: IStruct, withBigDecimals : Boolean = false): String = {
_toJson(value, withBigDecimals)
}
......
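Illustrative sketch (not part of this change): the Java-side round trip these IStruct/IReferenceableInstance changes enable, matching the calls now made from MetadataServiceClient.getEntity and DefaultMetadataService. The hive_table type name and attribute are assumptions for the example.
import org.apache.hadoop.metadata.typesystem.Referenceable;
import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;

public class InstanceJsonSketch {
    public static void main(String[] args) throws Exception {
        Referenceable table = new Referenceable("hive_table");
        table.set("name", "sales_fact");

        // Serialize the (untyped) instance to JSON and read it back as a Referenceable.
        String json = InstanceSerialization.toJson(table, true);
        Referenceable copy = InstanceSerialization.fromJsonReferenceable(json, true);
        System.out.println(copy.getTypeName() + " -> " + copy.getValuesMap());
    }
}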
......@@ -19,8 +19,7 @@
package org.apache.hadoop.metadata.typesystem.json
import com.google.common.collect.ImmutableList
import org.apache.hadoop.metadata._
import org.apache.hadoop.metadata.typesystem.persistence.{ReferenceableInstance, StructInstance}
import org.apache.hadoop.metadata.typesystem.persistence.{ReferenceableInstance, StructInstance, Id}
import org.apache.hadoop.metadata.typesystem.types._
import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil
import org.apache.hadoop.metadata.typesystem.{ITypedReferenceableInstance, ITypedStruct, Referenceable, Struct}
......@@ -127,65 +126,7 @@ class SerializationTest extends BaseTest {
Assert.assertEquals(ts1.toString, "{\n\td : \t1\n\tb : \ttrue\n\tc : \t1\n\ta : \t1\n\tA.B.D.b : \ttrue\n\tA.B.D.c : \t2\n\tA.B.D.d : \t2\n\tA.C.D.a : \t3\n\tA.C.D.b : \tfalse\n\tA.C.D.c : \t3\n\tA.C.D.d : \t3\n}")
}
@Test def testClass {
val ts: TypeSystem = getTypeSystem
val deptTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef(
"Department",
ImmutableList.of[String],
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("employees", String.format("array<%s>", "Person"),
Multiplicity.COLLECTION, true, "department"))
val personTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef(
"Person", ImmutableList.of[String],
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"),
new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates"))
val managerTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef(
"Manager", ImmutableList.of[String]("Person"),
new AttributeDefinition("subordinates", String.format("array<%s>", "Person"),
Multiplicity.COLLECTION, false, "manager"))
val securityClearanceTypeDef: HierarchicalTypeDefinition[TraitType] =
TypesUtil.createTraitTypeDef("SecurityClearance", ImmutableList.of[String],
TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE))
ts.defineTypes(ImmutableList.of[StructTypeDefinition],
ImmutableList.of[HierarchicalTypeDefinition[TraitType]](securityClearanceTypeDef),
ImmutableList.of[HierarchicalTypeDefinition[ClassType]](deptTypeDef, personTypeDef, managerTypeDef)
)
val hrDept: Referenceable = new Referenceable("Department")
val john: Referenceable = new Referenceable("Person")
val jane: Referenceable = new Referenceable("Manager", "SecurityClearance")
hrDept.set("name", "hr")
john.set("name", "John")
john.set("department", hrDept)
jane.set("name", "Jane")
jane.set("department", hrDept)
john.set("manager", jane)
hrDept.set("employees", ImmutableList.of[Referenceable](john, jane))
jane.set("subordinates", ImmutableList.of[Referenceable](john))
jane.getTrait("SecurityClearance").set("level", 1)
val deptType: ClassType = ts.getDataType(classOf[ClassType], "Department")
val hrDept2: ITypedReferenceableInstance = deptType.convert(hrDept, Multiplicity.REQUIRED)
println(s"HR Dept Object Graph:\n${hrDept2}\n")
implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
val ser = swrite(hrDept2)
println(s"HR Dept JSON:\n${pretty(render(parse(ser)))}\n")
println(s"HR Dept Object Graph read from JSON:${read[ReferenceableInstance](ser)}\n")
}
@Test def testReference {
val ts: TypeSystem = getTypeSystem
def defineHRTypes(ts: TypeSystem) : Unit = {
val deptTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef(
"Department",
ImmutableList.of[String],
......@@ -196,8 +137,7 @@ class SerializationTest extends BaseTest {
"Person", ImmutableList.of[String],
TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"),
new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates")
)
new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates"))
val managerTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef(
"Manager", ImmutableList.of[String]("Person"),
new AttributeDefinition("subordinates", String.format("array<%s>", "Person"),
......@@ -211,6 +151,9 @@ class SerializationTest extends BaseTest {
ImmutableList.of[HierarchicalTypeDefinition[ClassType]](deptTypeDef, personTypeDef, managerTypeDef)
)
}
def defineHRDept() : Referenceable = {
val hrDept: Referenceable = new Referenceable("Department")
val john: Referenceable = new Referenceable("Person")
val jane: Referenceable = new Referenceable("Manager", "SecurityClearance")
......@@ -221,8 +164,36 @@ class SerializationTest extends BaseTest {
jane.set("department", hrDept.getId)
john.set("manager", jane.getId)
hrDept.set("employees", ImmutableList.of[Referenceable](john, jane))
jane.set("subordinates", ImmutableList.of[Referenceable](john))
jane.set("subordinates", ImmutableList.of[Id](john.getId))
jane.getTrait("SecurityClearance").set("level", 1)
hrDept
}
@Test def testClass {
val ts: TypeSystem = getTypeSystem
defineHRTypes(ts)
val hrDept: Referenceable = defineHRDept()
val deptType: ClassType = ts.getDataType(classOf[ClassType], "Department")
val hrDept2: ITypedReferenceableInstance = deptType.convert(hrDept, Multiplicity.REQUIRED)
println(s"HR Dept Object Graph:\n${hrDept2}\n")
implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
val ser = swrite(hrDept2)
println(s"HR Dept JSON:\n${pretty(render(parse(ser)))}\n")
println(s"HR Dept Object Graph read from JSON:${read[ReferenceableInstance](ser)}\n")
}
@Test def testReference {
val ts: TypeSystem = getTypeSystem
defineHRTypes(ts)
val hrDept: Referenceable = defineHRDept()
val jsonStr = InstanceSerialization.toJson(hrDept)
......@@ -242,4 +213,30 @@ class SerializationTest extends BaseTest {
println(s"HR Dept Object Graph read from JSON:${read[ReferenceableInstance](ser)}\n")
}
@Test def testReference2 {
val ts: TypeSystem = getTypeSystem
defineHRTypes(ts)
val hrDept: Referenceable = defineHRDept()
val deptType: ClassType = ts.getDataType(classOf[ClassType], "Department")
val hrDept2: ITypedReferenceableInstance = deptType.convert(hrDept, Multiplicity.REQUIRED)
val jsonStr = InstanceSerialization.toJson(hrDept2)
val hrDept3 = InstanceSerialization.fromJsonReferenceable(jsonStr)
val hrDept4: ITypedReferenceableInstance = deptType.convert(hrDept2, Multiplicity.REQUIRED)
println(s"HR Dept Object Graph:\n${hrDept4}\n")
implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
val ser = swrite(hrDept4)
println(s"HR Dept JSON:\n${pretty(render(parse(ser)))}\n")
println(s"HR Dept Object Graph read from JSON:${read[ReferenceableInstance](ser)}\n")
}
}
\ No newline at end of file
......@@ -48,9 +48,17 @@
<artifactId>metadata-repository</artifactId>
</dependency>
<!--<dependency>-->
<!--<groupId>org.apache.hadoop.metadata</groupId>-->
<!--<artifactId>metadata-client</artifactId>-->
<!--</dependency>-->
<dependency>
<groupId>org.apache.hadoop.metadata</groupId>
<artifactId>metadata-client</artifactId>
<version>${version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
......@@ -214,6 +222,7 @@
<artifactId>maven-war-plugin</artifactId>
<version>2.4</version>
<configuration>
<attachClasses>true</attachClasses>
<webResources>
<resource>
<directory>../dashboard/v3</directory>
......
......@@ -26,13 +26,15 @@ import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
import java.io.*;
import java.util.Arrays;
import static org.apache.hadoop.metadata.security.SecurityProperties.*;
/**
* A utility class for generating a credential provider containing the entries required for supporting the SSL implementation
* of the DGC server.
*/
public class CredentialProviderUtility {
private static final String[] KEYS = new String[] {SecureEmbeddedServer.KEYSTORE_PASSWORD_KEY,
SecureEmbeddedServer.TRUSTSTORE_PASSWORD_KEY, SecureEmbeddedServer.SERVER_CERT_PASSWORD_KEY};
private static final String[] KEYS = new String[] {KEYSTORE_PASSWORD_KEY,
TRUSTSTORE_PASSWORD_KEY, SERVER_CERT_PASSWORD_KEY};
public static abstract class TextDevice {
public abstract void printf(String fmt, Object... params);
......
......@@ -21,6 +21,7 @@ package org.apache.hadoop.metadata.web.filters;
import com.google.inject.Singleton;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.PropertiesUtil;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
......@@ -49,8 +50,8 @@ public class MetadataAuthenticationFilter extends AuthenticationFilter {
protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) throws ServletException {
PropertiesConfiguration configuration;
try {
configuration = new PropertiesConfiguration("application.properties");
} catch (ConfigurationException e) {
configuration = PropertiesUtil.getApplicationProperties();
} catch (Exception e) {
throw new ServletException(e);
}
......@@ -95,6 +96,8 @@ public class MetadataAuthenticationFilter extends AuthenticationFilter {
config.put(KerberosAuthenticationHandler.PRINCIPAL, principal);
}
LOG.info("AuthenticationFilterConfig: {}", config);
return config;
}
......
......@@ -27,6 +27,7 @@ import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.PropertiesUtil;
import org.apache.hadoop.metadata.RepositoryMetadataModule;
import org.apache.hadoop.metadata.repository.typestore.ITypeStore;
import org.apache.hadoop.metadata.typesystem.TypesDef;
......@@ -80,10 +81,13 @@ public class GuiceServletConfig extends GuiceServletContextListener {
}
private void configureAuthenticationFilter() throws ConfigurationException {
PropertiesConfiguration configuration =
new PropertiesConfiguration("application.properties");
if (Boolean.valueOf(configuration.getString(HTTP_AUTHENTICATION_ENABLED))) {
filter("/*").through(MetadataAuthenticationFilter.class);
try {
PropertiesConfiguration configuration = PropertiesUtil.getApplicationProperties();
if (Boolean.valueOf(configuration.getString(HTTP_AUTHENTICATION_ENABLED))) {
filter("/*").through(MetadataAuthenticationFilter.class);
}
} catch (MetadataException e) {
LOG.warn("Error loading configuration and initializing authentication filter", e);
}
}
});
......
......@@ -19,6 +19,8 @@ package org.apache.hadoop.metadata.web.listeners;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.metadata.MetadataException;
import org.apache.hadoop.metadata.PropertiesUtil;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Shell;
......@@ -120,7 +122,11 @@ public class LoginProcessor {
* @throws ConfigurationException
*/
protected PropertiesConfiguration getPropertiesConfiguration() throws ConfigurationException {
return new PropertiesConfiguration("application.properties");
try {
return PropertiesUtil.getApplicationProperties();
} catch (MetadataException e) {
throw new ConfigurationException(e);
}
}
/**
......
......@@ -28,23 +28,16 @@ import org.mortbay.jetty.security.SslSocketConnector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import static org.apache.hadoop.metadata.security.SecurityProperties.*;
/**
* This is a jetty server which requires client auth via certificates.
*/
public class SecureEmbeddedServer extends EmbeddedServer {
public static final String KEYSTORE_FILE_KEY = "keystore.file";
public static final String DEFAULT_KEYSTORE_FILE_LOCATION = "target/metadata.keystore";
public static final String KEYSTORE_PASSWORD_KEY = "keystore.password";
public static final String TRUSTSTORE_FILE_KEY = "truststore.file";
public static final String DEFATULT_TRUSTORE_FILE_LOCATION = "target/metadata.keystore";
public static final String TRUSTSTORE_PASSWORD_KEY = "truststore.password";
public static final String SERVER_CERT_PASSWORD_KEY = "password";
public static final String CLIENT_AUTH_KEY = "client.auth.enabled";
public static final String CERT_STORES_CREDENTIAL_PROVIDER_PATH = "cert.stores.credential.provider.path";
private static final Logger LOG = LoggerFactory.getLogger(SecureEmbeddedServer.class);
public SecureEmbeddedServer(int port, String path) throws IOException {
......
......@@ -31,6 +31,8 @@ import java.io.IOException;
import java.nio.file.Files;
import java.util.*;
import static org.apache.hadoop.metadata.security.SecurityProperties.*;
/**
*
*/
......@@ -75,11 +77,11 @@ public class CredentialProviderUtilityIT {
CredentialProviderFactory.getProviders(conf).get(0);
CredentialProvider.CredentialEntry entry =
provider.getCredentialEntry(SecureEmbeddedServer.KEYSTORE_PASSWORD_KEY);
provider.getCredentialEntry(KEYSTORE_PASSWORD_KEY);
assertCredentialEntryCorrect(entry);
entry = provider.getCredentialEntry(SecureEmbeddedServer.TRUSTSTORE_PASSWORD_KEY);
entry = provider.getCredentialEntry(TRUSTSTORE_PASSWORD_KEY);
assertCredentialEntryCorrect(entry);
entry = provider.getCredentialEntry(SecureEmbeddedServer.SERVER_CERT_PASSWORD_KEY);
entry = provider.getCredentialEntry(SERVER_CERT_PASSWORD_KEY);
assertCredentialEntryCorrect(entry);
}
......@@ -138,11 +140,11 @@ public class CredentialProviderUtilityIT {
CredentialProviderFactory.getProviders(conf).get(0);
CredentialProvider.CredentialEntry entry =
provider.getCredentialEntry(SecureEmbeddedServer.KEYSTORE_PASSWORD_KEY);
provider.getCredentialEntry(KEYSTORE_PASSWORD_KEY);
assertCredentialEntryCorrect(entry);
entry = provider.getCredentialEntry(SecureEmbeddedServer.TRUSTSTORE_PASSWORD_KEY);
entry = provider.getCredentialEntry(TRUSTSTORE_PASSWORD_KEY);
assertCredentialEntryCorrect(entry);
entry = provider.getCredentialEntry(SecureEmbeddedServer.SERVER_CERT_PASSWORD_KEY);
entry = provider.getCredentialEntry(SERVER_CERT_PASSWORD_KEY);
assertCredentialEntryCorrect(entry);
}
......@@ -192,11 +194,11 @@ public class CredentialProviderUtilityIT {
CredentialProviderFactory.getProviders(conf).get(0);
CredentialProvider.CredentialEntry entry =
provider.getCredentialEntry(SecureEmbeddedServer.KEYSTORE_PASSWORD_KEY);
provider.getCredentialEntry(KEYSTORE_PASSWORD_KEY);
assertCredentialEntryCorrect(entry);
entry = provider.getCredentialEntry(SecureEmbeddedServer.TRUSTSTORE_PASSWORD_KEY);
entry = provider.getCredentialEntry(TRUSTSTORE_PASSWORD_KEY);
assertCredentialEntryCorrect(entry);
entry = provider.getCredentialEntry(SecureEmbeddedServer.SERVER_CERT_PASSWORD_KEY);
entry = provider.getCredentialEntry(SERVER_CERT_PASSWORD_KEY);
assertCredentialEntryCorrect(entry);
}
......@@ -260,11 +262,11 @@ public class CredentialProviderUtilityIT {
char[] newpass = "newpass".toCharArray();
CredentialProvider.CredentialEntry entry =
provider.getCredentialEntry(SecureEmbeddedServer.KEYSTORE_PASSWORD_KEY);
provider.getCredentialEntry(KEYSTORE_PASSWORD_KEY);
assertCredentialEntryCorrect(entry, newpass);
entry = provider.getCredentialEntry(SecureEmbeddedServer.TRUSTSTORE_PASSWORD_KEY);
entry = provider.getCredentialEntry(TRUSTSTORE_PASSWORD_KEY);
assertCredentialEntryCorrect(entry, newpass);
entry = provider.getCredentialEntry(SecureEmbeddedServer.SERVER_CERT_PASSWORD_KEY);
entry = provider.getCredentialEntry(SERVER_CERT_PASSWORD_KEY);
assertCredentialEntryCorrect(entry, newpass);
}
}
......@@ -19,7 +19,7 @@ package org.apache.hadoop.metadata.web.filters;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.hdfs.web.URLConnectionFactory;
import org.apache.hadoop.metadata.web.BaseSecurityTest;
import org.apache.hadoop.metadata.security.BaseSecurityTest;
import org.apache.hadoop.metadata.web.service.EmbeddedServer;
import org.mortbay.jetty.Server;
import org.testng.Assert;
......@@ -59,6 +59,9 @@ public class MetadataAuthenticationKerberosFilterIT extends BaseSecurityTest {
@Test
public void testKerberosBasedLogin() throws Exception {
String originalConf = System.getProperty("metadata.conf");
System.setProperty("metadata.conf", System.getProperty("user.dir"));
setupKDCAndPrincipals();
TestEmbeddedServer server = null;
......@@ -102,6 +105,12 @@ public class MetadataAuthenticationKerberosFilterIT extends BaseSecurityTest {
server.getServer().stop();
kdc.stop();
if (originalConf != null) {
System.setProperty("metadata.conf", originalConf);
} else {
System.clearProperty("metadata.conf");
}
}
......
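
The filter test above captures the previous value of the "metadata.conf" system property, points it at the working directory for the duration of the test, and restores or clears it in the finally block. A self-contained sketch of that save/override/restore pattern as a reusable helper (the class name is illustrative only):

public final class SystemPropertyOverride implements AutoCloseable {
    private final String key;
    private final String original;

    public SystemPropertyOverride(String key, String value) {
        this.key = key;
        this.original = System.getProperty(key);   // remember the previous value, if any
        System.setProperty(key, value);
    }

    @Override
    public void close() {
        if (original != null) {
            System.setProperty(key, original);     // restore the old value
        } else {
            System.clearProperty(key);             // there was no old value: clear it
        }
    }
}

Used as try (SystemPropertyOverride ignored = new SystemPropertyOverride("metadata.conf", System.getProperty("user.dir"))) { ... }, the property is restored even when the test body throws.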
......@@ -17,7 +17,7 @@
package org.apache.hadoop.metadata.web.filters;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.hadoop.metadata.web.BaseSecurityTest;
import org.apache.hadoop.metadata.security.BaseSecurityTest;
import org.apache.hadoop.metadata.web.service.EmbeddedServer;
import org.mortbay.jetty.Server;
import org.testng.Assert;
......@@ -45,6 +45,8 @@ public class MetadataAuthenticationSimpleFilterIT extends BaseSecurityTest {
@Test
public void testSimpleLogin() throws Exception {
String originalConf = System.getProperty("metadata.conf");
System.setProperty("metadata.conf", System.getProperty("user.dir"));
generateSimpleLoginConfiguration();
TestEmbeddedServer server = new TestEmbeddedServer(23001, "webapp/target/metadata-governance");
......@@ -71,6 +73,11 @@ public class MetadataAuthenticationSimpleFilterIT extends BaseSecurityTest {
Assert.assertEquals(connection.getResponseCode(), 200);
} finally {
server.getServer().stop();
if (originalConf != null) {
System.setProperty("metadata.conf", originalConf);
} else {
System.clearProperty("metadata.conf");
}
}
......
......@@ -18,12 +18,10 @@ package org.apache.hadoop.metadata.web.listeners;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.metadata.web.BaseSecurityTest;
import org.apache.hadoop.metadata.security.BaseSecurityTest;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Shell;
import org.testng.Assert;
import org.testng.annotations.Test;
......@@ -99,15 +97,6 @@ public class LoginProcessorIT extends BaseSecurityTest {
Assert.assertNotNull(kdc.getRealm());
File keytabFile = createKeytab(kdc, kdcWorkDir, "dgi", "dgi.keytab");
String dgiServerPrincipal = Shell.WINDOWS ? "dgi/127.0.0.1" : "dgi/localhost";
StringBuilder jaas = new StringBuilder(1024);
jaas.append(createJAASEntry("Client", "dgi", keytabFile));
jaas.append(createJAASEntry("Server", dgiServerPrincipal, keytabFile));
File jaasFile = new File(kdcWorkDir, "jaas.txt");
FileUtils.write(jaasFile, jaas.toString());
bindJVMtoJAASFile(jaasFile);
return keytabFile;
}
......
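
The LoginProcessorIT hunk above builds a keytab against an embedded KDC via createKeytab(kdc, kdcWorkDir, "dgi", "dgi.keytab"). A minimal sketch of the kind of MiniKdc bootstrap that sits behind such helpers, assuming the stock org.apache.hadoop.minikdc.MiniKdc API (the work directory and principal names are illustrative):

import java.io.File;
import java.util.Properties;
import org.apache.hadoop.minikdc.MiniKdc;

public class MiniKdcSketch {
    public static void main(String[] args) throws Exception {
        File workDir = new File("target", "kdc");       // illustrative work directory
        workDir.mkdirs();
        Properties kdcConf = MiniKdc.createConf();      // default embedded-KDC settings
        MiniKdc kdc = new MiniKdc(kdcConf, workDir);
        kdc.start();
        File keytab = new File(workDir, "dgi.keytab");
        kdc.createPrincipal(keytab, "dgi/localhost");   // principal of the kind the tests use
        System.out.println("Realm: " + kdc.getRealm());
        kdc.stop();
    }
}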
......@@ -28,6 +28,8 @@ import org.apache.hadoop.metadata.typesystem.TypesDef;
import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;
import org.apache.hadoop.metadata.typesystem.json.TypesSerialization;
import org.apache.hadoop.metadata.typesystem.persistence.Id;
import org.apache.hadoop.metadata.typesystem.types.ClassType;
import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
import org.codehaus.jettison.json.JSONObject;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
......@@ -59,8 +61,11 @@ public abstract class BaseResourceIT {
}
protected void createType(TypesDef typesDef) throws Exception {
String typesAsJSON = TypesSerialization.toJson(typesDef);
createType(typesAsJSON);
HierarchicalTypeDefinition<ClassType> sampleType = typesDef.classTypesAsJavaList().get(0);
if (serviceClient.getType(sampleType.typeName) == null) {
String typesAsJSON = TypesSerialization.toJson(typesDef);
createType(typesAsJSON);
}
}
protected void createType(String typesAsJSON) throws Exception {
......
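
The guard added above keeps type registration idempotent, so fixtures can run repeatedly against an already-populated server. Spelled out as a method in the spirit of BaseResourceIT, under the assumption that getType(...) returns null for a type the server does not yet know (the helper name is illustrative):

// Register the TypesDef only when its first class type is not yet known to the server.
protected void registerTypesOnce(TypesDef typesDef) throws Exception {
    HierarchicalTypeDefinition<ClassType> sample = typesDef.classTypesAsJavaList().get(0);
    if (serviceClient.getType(sample.typeName) == null) {
        createType(TypesSerialization.toJson(typesDef));  // first run: register
    }
    // later runs: no-op, avoiding "type already exists" failures
}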
......@@ -150,6 +150,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
final String definition = response.getString(MetadataServiceClient.RESULTS);
Assert.assertNotNull(definition);
LOG.debug("tableInstanceAfterGet = " + definition);
InstanceSerialization.fromJsonReferenceable(definition, true);
}
private ClientResponse addProperty(String guid, String property, String value) {
......@@ -419,7 +420,9 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
new AttributeDefinition("serde2",
"serdeType", Multiplicity.REQUIRED, false, null),
new AttributeDefinition("database",
DATABASE_TYPE, Multiplicity.REQUIRED, true, null));
DATABASE_TYPE, Multiplicity.REQUIRED, true, null),
new AttributeDefinition("compressed",
DataTypes.BOOLEAN_TYPE.getName(), Multiplicity.OPTIONAL, true, null));
HierarchicalTypeDefinition<TraitType> classificationTraitDefinition =
TypesUtil.createTraitTypeDef("classification",
......@@ -460,6 +463,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
tableInstance.set("level", 2);
tableInstance.set("tableType", 1); // enum
tableInstance.set("database", databaseInstance);
tableInstance.set("compressed", false);
Struct traitInstance = (Struct) tableInstance.getTrait("classification");
traitInstance.set("tag", "foundation_etl");
......
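
The hunks above add an optional boolean "compressed" attribute to the table type definition and set it on the test instance. A short sketch of how an OPTIONAL attribute differs from a REQUIRED one when populating an instance; TABLE_TYPE, databaseInstance and the surrounding test context are assumed, so this is illustrative only:

// Referenceable(typeName, traitNames...) from the typesystem module.
Referenceable table = new Referenceable(TABLE_TYPE, "classification");
table.set("name", "sample_table");
table.set("database", databaseInstance);   // REQUIRED (Multiplicity.REQUIRED): must be set
table.set("compressed", false);            // OPTIONAL (Multiplicity.OPTIONAL): may be omitted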
......@@ -24,12 +24,14 @@ import org.testng.annotations.Test;
import java.net.HttpURLConnection;
import java.net.URL;
import static org.apache.hadoop.metadata.security.SecurityProperties.*;
public class SecureEmbeddedServerIT extends SecureEmbeddedServerITBase{
@Test
public void testServerConfiguredUsingCredentialProvider() throws Exception {
// setup the configuration
final PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.setProperty(SecureEmbeddedServer.CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
// setup the credential provider
setupCredentials();
......
......@@ -45,6 +45,8 @@ import java.net.URL;
import java.nio.file.Files;
import java.util.List;
import static org.apache.hadoop.metadata.security.SecurityProperties.*;
/**
*
*/
......@@ -69,7 +71,7 @@ public class SecureEmbeddedServerITBase {
return false;
}
});
System.setProperty("javax.net.ssl.trustStore", SecureEmbeddedServer.DEFAULT_KEYSTORE_FILE_LOCATION);
System.setProperty("javax.net.ssl.trustStore", DEFAULT_KEYSTORE_FILE_LOCATION);
System.setProperty("javax.net.ssl.trustStorePassword", "keypass");
System.setProperty("javax.net.ssl.trustStoreType", "JKS");
}
......@@ -122,7 +124,7 @@ public class SecureEmbeddedServerITBase {
public void testMissingEntriesInCredentialProvider() throws Exception {
// setup the configuration
final PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.setProperty(SecureEmbeddedServer.CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
try {
secureEmbeddedServer = new SecureEmbeddedServer(21443, "webapp/target/metadata-governance") {
......@@ -147,7 +149,7 @@ public class SecureEmbeddedServerITBase {
@Test
public void runOtherSuitesAgainstSecureServer() throws Exception {
final PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.setProperty(SecureEmbeddedServer.CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
// setup the credential provider
setupCredentials();
......@@ -198,15 +200,15 @@ public class SecureEmbeddedServerITBase {
char[] storepass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
SecureEmbeddedServer.KEYSTORE_PASSWORD_KEY, storepass);
KEYSTORE_PASSWORD_KEY, storepass);
char[] trustpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
SecureEmbeddedServer.TRUSTSTORE_PASSWORD_KEY, trustpass);
TRUSTSTORE_PASSWORD_KEY, trustpass);
char[] certpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
provider.createCredentialEntry(
SecureEmbeddedServer.SERVER_CERT_PASSWORD_KEY, certpass);
SERVER_CERT_PASSWORD_KEY, certpass);
// write out so that it can be found in checks
provider.flush();
......
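
setupCredentials() above provisions the three aliases that the secure server later resolves. A compact sketch of that write side, assuming a jceks provider URL of the kind the test's providerUrl points at (the file path below is illustrative):

import static org.apache.hadoop.metadata.security.SecurityProperties.*;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.alias.CredentialProvider;
import org.apache.hadoop.security.alias.CredentialProviderFactory;

public class CredentialSetupSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration(false);
        // Point Hadoop's credential machinery at a keystore-backed provider.
        conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
                 "jceks://file/tmp/test.jceks");               // illustrative location
        CredentialProvider provider = CredentialProviderFactory.getProviders(conf).get(0);
        char[] pass = "keypass".toCharArray();
        provider.createCredentialEntry(KEYSTORE_PASSWORD_KEY, pass);
        provider.createCredentialEntry(TRUSTSTORE_PASSWORD_KEY, pass);
        provider.createCredentialEntry(SERVER_CERT_PASSWORD_KEY, pass);
        provider.flush();   // persist the entries so later reads can find them
    }
}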